{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "eval_steps": 500,
  "global_step": 7000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011428571428571429,
      "grad_norm": 2.3897457122802734,
      "learning_rate": 4.9935714285714285e-05,
      "loss": 5.5799,
      "step": 10
    },
    {
      "epoch": 0.022857142857142857,
      "grad_norm": 1.2946193218231201,
      "learning_rate": 4.986428571428572e-05,
      "loss": 2.9562,
      "step": 20
    },
    {
      "epoch": 0.03428571428571429,
      "grad_norm": 1.235308051109314,
      "learning_rate": 4.9792857142857146e-05,
      "loss": 2.2701,
      "step": 30
    },
    {
      "epoch": 0.045714285714285714,
      "grad_norm": 1.25638747215271,
      "learning_rate": 4.972142857142858e-05,
      "loss": 1.6642,
      "step": 40
    },
    {
      "epoch": 0.05714285714285714,
      "grad_norm": 1.275929570198059,
      "learning_rate": 4.965e-05,
      "loss": 1.2349,
      "step": 50
    },
    {
      "epoch": 0.06857142857142857,
      "grad_norm": 1.2436413764953613,
      "learning_rate": 4.957857142857143e-05,
      "loss": 0.9634,
      "step": 60
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.1808298826217651,
      "learning_rate": 4.9507142857142855e-05,
      "loss": 0.7025,
      "step": 70
    },
    {
      "epoch": 0.09142857142857143,
      "grad_norm": 1.0625460147857666,
      "learning_rate": 4.943571428571429e-05,
      "loss": 0.5385,
      "step": 80
    },
    {
      "epoch": 0.10285714285714286,
      "grad_norm": 0.8773331046104431,
      "learning_rate": 4.936428571428572e-05,
      "loss": 0.418,
      "step": 90
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 0.8841960430145264,
      "learning_rate": 4.929285714285715e-05,
      "loss": 0.3878,
      "step": 100
    },
    {
      "epoch": 0.12571428571428572,
      "grad_norm": 0.7253878116607666,
      "learning_rate": 4.922142857142857e-05,
      "loss": 0.3189,
      "step": 110
    },
    {
      "epoch": 0.13714285714285715,
      "grad_norm": 0.6557711958885193,
      "learning_rate": 4.915e-05,
      "loss": 0.2695,
      "step": 120
    },
    {
      "epoch": 0.14857142857142858,
      "grad_norm": 0.6099198460578918,
      "learning_rate": 4.9078571428571426e-05,
      "loss": 0.2424,
      "step": 130
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.6418436169624329,
      "learning_rate": 4.900714285714286e-05,
      "loss": 0.2242,
      "step": 140
    },
    {
      "epoch": 0.17142857142857143,
      "grad_norm": 0.6211830973625183,
      "learning_rate": 4.893571428571429e-05,
      "loss": 0.1925,
      "step": 150
    },
    {
      "epoch": 0.18285714285714286,
      "grad_norm": 0.5805016756057739,
      "learning_rate": 4.886428571428572e-05,
      "loss": 0.1856,
      "step": 160
    },
    {
      "epoch": 0.19428571428571428,
      "grad_norm": 0.5227402448654175,
      "learning_rate": 4.879285714285715e-05,
      "loss": 0.1676,
      "step": 170
    },
    {
      "epoch": 0.2057142857142857,
      "grad_norm": 0.5591551661491394,
      "learning_rate": 4.872142857142857e-05,
      "loss": 0.1706,
      "step": 180
    },
    {
      "epoch": 0.21714285714285714,
      "grad_norm": 0.47619232535362244,
      "learning_rate": 4.8650000000000003e-05,
      "loss": 0.1589,
      "step": 190
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 0.48772159218788147,
      "learning_rate": 4.857857142857143e-05,
      "loss": 0.1654,
      "step": 200
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.48056119680404663,
      "learning_rate": 4.8507142857142865e-05,
      "loss": 0.1501,
      "step": 210
    },
    {
      "epoch": 0.25142857142857145,
      "grad_norm": 0.4571131467819214,
      "learning_rate": 4.843571428571429e-05,
      "loss": 0.1413,
      "step": 220
    },
    {
      "epoch": 0.26285714285714284,
      "grad_norm": 0.5747288465499878,
      "learning_rate": 4.836428571428572e-05,
      "loss": 0.1258,
      "step": 230
    },
    {
      "epoch": 0.2742857142857143,
      "grad_norm": 0.4191080629825592,
      "learning_rate": 4.8292857142857143e-05,
      "loss": 0.1182,
      "step": 240
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.4493291974067688,
      "learning_rate": 4.8221428571428574e-05,
      "loss": 0.1258,
      "step": 250
    },
    {
      "epoch": 0.29714285714285715,
      "grad_norm": 0.3831436038017273,
      "learning_rate": 4.815e-05,
      "loss": 0.1273,
      "step": 260
    },
    {
      "epoch": 0.30857142857142855,
      "grad_norm": 0.3865627944469452,
      "learning_rate": 4.807857142857143e-05,
      "loss": 0.1119,
      "step": 270
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.42649543285369873,
      "learning_rate": 4.800714285714286e-05,
      "loss": 0.1166,
      "step": 280
    },
    {
      "epoch": 0.3314285714285714,
      "grad_norm": 0.4094502925872803,
      "learning_rate": 4.793571428571429e-05,
      "loss": 0.1142,
      "step": 290
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 0.3270905315876007,
      "learning_rate": 4.7864285714285714e-05,
      "loss": 0.1123,
      "step": 300
    },
    {
      "epoch": 0.35428571428571426,
      "grad_norm": 0.35536834597587585,
      "learning_rate": 4.7792857142857145e-05,
      "loss": 0.1003,
      "step": 310
    },
    {
      "epoch": 0.3657142857142857,
      "grad_norm": 0.41495510935783386,
      "learning_rate": 4.7721428571428576e-05,
      "loss": 0.099,
      "step": 320
    },
    {
      "epoch": 0.37714285714285717,
      "grad_norm": 0.470359206199646,
      "learning_rate": 4.765e-05,
      "loss": 0.0969,
      "step": 330
    },
    {
      "epoch": 0.38857142857142857,
      "grad_norm": 0.4018060266971588,
      "learning_rate": 4.757857142857143e-05,
      "loss": 0.0955,
      "step": 340
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.34157299995422363,
      "learning_rate": 4.750714285714286e-05,
      "loss": 0.0979,
      "step": 350
    },
    {
      "epoch": 0.4114285714285714,
      "grad_norm": 0.31011414527893066,
      "learning_rate": 4.743571428571429e-05,
      "loss": 0.0869,
      "step": 360
    },
    {
      "epoch": 0.4228571428571429,
      "grad_norm": 0.26191529631614685,
      "learning_rate": 4.7364285714285716e-05,
      "loss": 0.0846,
      "step": 370
    },
    {
      "epoch": 0.4342857142857143,
      "grad_norm": 0.30978405475616455,
      "learning_rate": 4.7292857142857146e-05,
      "loss": 0.087,
      "step": 380
    },
    {
      "epoch": 0.44571428571428573,
      "grad_norm": 0.38050106167793274,
      "learning_rate": 4.722142857142857e-05,
      "loss": 0.0786,
      "step": 390
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 0.24444682896137238,
      "learning_rate": 4.715e-05,
      "loss": 0.0811,
      "step": 400
    },
    {
      "epoch": 0.4685714285714286,
      "grad_norm": 0.31481674313545227,
      "learning_rate": 4.707857142857143e-05,
      "loss": 0.0818,
      "step": 410
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.3219773471355438,
      "learning_rate": 4.700714285714286e-05,
      "loss": 0.0775,
      "step": 420
    },
    {
      "epoch": 0.49142857142857144,
      "grad_norm": 0.2883884310722351,
      "learning_rate": 4.6935714285714286e-05,
      "loss": 0.0697,
      "step": 430
    },
    {
      "epoch": 0.5028571428571429,
      "grad_norm": 0.33833104372024536,
      "learning_rate": 4.686428571428572e-05,
      "loss": 0.0797,
      "step": 440
    },
    {
      "epoch": 0.5142857142857142,
      "grad_norm": 0.23762065172195435,
      "learning_rate": 4.679285714285714e-05,
      "loss": 0.0755,
      "step": 450
    },
    {
      "epoch": 0.5257142857142857,
      "grad_norm": 0.32968372106552124,
      "learning_rate": 4.672142857142857e-05,
      "loss": 0.0795,
      "step": 460
    },
    {
      "epoch": 0.5371428571428571,
      "grad_norm": 0.29430899024009705,
      "learning_rate": 4.665e-05,
      "loss": 0.0705,
      "step": 470
    },
    {
      "epoch": 0.5485714285714286,
      "grad_norm": 0.3451765179634094,
      "learning_rate": 4.657857142857143e-05,
      "loss": 0.067,
      "step": 480
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.30210795998573303,
      "learning_rate": 4.650714285714286e-05,
      "loss": 0.0701,
      "step": 490
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.2613590359687805,
      "learning_rate": 4.643571428571429e-05,
      "loss": 0.0649,
      "step": 500
    },
    {
      "epoch": 0.5828571428571429,
      "grad_norm": 0.43155479431152344,
      "learning_rate": 4.636428571428572e-05,
      "loss": 0.0648,
      "step": 510
    },
    {
      "epoch": 0.5942857142857143,
      "grad_norm": 0.38308286666870117,
      "learning_rate": 4.629285714285714e-05,
      "loss": 0.0584,
      "step": 520
    },
    {
      "epoch": 0.6057142857142858,
      "grad_norm": 0.2277926206588745,
      "learning_rate": 4.622142857142857e-05,
      "loss": 0.0708,
      "step": 530
    },
    {
      "epoch": 0.6171428571428571,
      "grad_norm": 0.21378019452095032,
      "learning_rate": 4.6150000000000004e-05,
      "loss": 0.0624,
      "step": 540
    },
    {
      "epoch": 0.6285714285714286,
      "grad_norm": 0.23331303894519806,
      "learning_rate": 4.6078571428571434e-05,
      "loss": 0.062,
      "step": 550
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2659645080566406,
      "learning_rate": 4.600714285714286e-05,
      "loss": 0.0658,
      "step": 560
    },
    {
      "epoch": 0.6514285714285715,
      "grad_norm": 0.30504903197288513,
      "learning_rate": 4.593571428571429e-05,
      "loss": 0.0605,
      "step": 570
    },
    {
      "epoch": 0.6628571428571428,
      "grad_norm": 0.3272831439971924,
      "learning_rate": 4.586428571428571e-05,
      "loss": 0.0667,
      "step": 580
    },
    {
      "epoch": 0.6742857142857143,
      "grad_norm": 0.22067314386367798,
      "learning_rate": 4.5792857142857144e-05,
      "loss": 0.0649,
      "step": 590
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 0.3015865087509155,
      "learning_rate": 4.5721428571428574e-05,
      "loss": 0.0639,
      "step": 600
    },
    {
      "epoch": 0.6971428571428572,
      "grad_norm": 0.2650837302207947,
      "learning_rate": 4.5650000000000005e-05,
      "loss": 0.0614,
      "step": 610
    },
    {
      "epoch": 0.7085714285714285,
      "grad_norm": 0.2098853588104248,
      "learning_rate": 4.557857142857143e-05,
      "loss": 0.0622,
      "step": 620
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.21571612358093262,
      "learning_rate": 4.550714285714286e-05,
      "loss": 0.0557,
      "step": 630
    },
    {
      "epoch": 0.7314285714285714,
      "grad_norm": 0.23635227978229523,
      "learning_rate": 4.5435714285714284e-05,
      "loss": 0.0566,
      "step": 640
    },
    {
      "epoch": 0.7428571428571429,
      "grad_norm": 0.20776158571243286,
      "learning_rate": 4.5364285714285714e-05,
      "loss": 0.06,
      "step": 650
    },
    {
      "epoch": 0.7542857142857143,
      "grad_norm": 0.2526012063026428,
      "learning_rate": 4.5292857142857145e-05,
      "loss": 0.0628,
      "step": 660
    },
    {
      "epoch": 0.7657142857142857,
      "grad_norm": 0.18527157604694366,
      "learning_rate": 4.5221428571428576e-05,
      "loss": 0.0582,
      "step": 670
    },
    {
      "epoch": 0.7771428571428571,
      "grad_norm": 0.2718740701675415,
      "learning_rate": 4.5150000000000006e-05,
      "loss": 0.0582,
      "step": 680
    },
    {
      "epoch": 0.7885714285714286,
      "grad_norm": 0.19608113169670105,
      "learning_rate": 4.507857142857143e-05,
      "loss": 0.0589,
      "step": 690
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2381214052438736,
      "learning_rate": 4.500714285714286e-05,
      "loss": 0.0528,
      "step": 700
    },
    {
      "epoch": 0.8114285714285714,
      "grad_norm": 0.22083961963653564,
      "learning_rate": 4.4935714285714285e-05,
      "loss": 0.0619,
      "step": 710
    },
    {
      "epoch": 0.8228571428571428,
      "grad_norm": 0.253137469291687,
      "learning_rate": 4.4864285714285716e-05,
      "loss": 0.0589,
      "step": 720
    },
    {
      "epoch": 0.8342857142857143,
      "grad_norm": 0.24962878227233887,
      "learning_rate": 4.4792857142857146e-05,
      "loss": 0.0599,
      "step": 730
    },
    {
      "epoch": 0.8457142857142858,
      "grad_norm": 0.2471620887517929,
      "learning_rate": 4.472142857142858e-05,
      "loss": 0.0607,
      "step": 740
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.22191166877746582,
      "learning_rate": 4.465e-05,
      "loss": 0.0563,
      "step": 750
    },
    {
      "epoch": 0.8685714285714285,
      "grad_norm": 0.24587008357048035,
      "learning_rate": 4.457857142857143e-05,
      "loss": 0.0552,
      "step": 760
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2442178726196289,
      "learning_rate": 4.4507142857142856e-05,
      "loss": 0.0573,
      "step": 770
    },
    {
      "epoch": 0.8914285714285715,
      "grad_norm": 0.17938025295734406,
      "learning_rate": 4.4435714285714286e-05,
      "loss": 0.0581,
      "step": 780
    },
    {
      "epoch": 0.9028571428571428,
      "grad_norm": 0.20533467829227448,
      "learning_rate": 4.436428571428572e-05,
      "loss": 0.0551,
      "step": 790
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 0.18826663494110107,
      "learning_rate": 4.429285714285715e-05,
      "loss": 0.0585,
      "step": 800
    },
    {
      "epoch": 0.9257142857142857,
      "grad_norm": 0.2727915346622467,
      "learning_rate": 4.422142857142857e-05,
      "loss": 0.0569,
      "step": 810
    },
    {
      "epoch": 0.9371428571428572,
      "grad_norm": 0.300370454788208,
      "learning_rate": 4.415e-05,
      "loss": 0.0549,
      "step": 820
    },
    {
      "epoch": 0.9485714285714286,
      "grad_norm": 0.3136334717273712,
      "learning_rate": 4.407857142857143e-05,
      "loss": 0.0551,
      "step": 830
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.25469839572906494,
      "learning_rate": 4.400714285714286e-05,
      "loss": 0.0499,
      "step": 840
    },
    {
      "epoch": 0.9714285714285714,
      "grad_norm": 0.17637017369270325,
      "learning_rate": 4.393571428571429e-05,
      "loss": 0.0544,
      "step": 850
    },
    {
      "epoch": 0.9828571428571429,
      "grad_norm": 0.2047278881072998,
      "learning_rate": 4.386428571428572e-05,
      "loss": 0.0534,
      "step": 860
    },
    {
      "epoch": 0.9942857142857143,
      "grad_norm": 0.19194772839546204,
      "learning_rate": 4.379285714285715e-05,
      "loss": 0.0499,
      "step": 870
    },
    {
      "epoch": 1.0,
      "eval_bleu": 0.07,
      "eval_exact_match": 0.0,
      "eval_loss": 0.039771415293216705,
      "eval_runtime": 343.2775,
      "eval_samples_per_second": 17.479,
      "eval_steps_per_second": 1.092,
      "step": 875
    },
    {
      "epoch": 1.0057142857142858,
      "grad_norm": 0.25923219323158264,
      "learning_rate": 4.372142857142857e-05,
      "loss": 0.0535,
      "step": 880
    },
    {
      "epoch": 1.0171428571428571,
      "grad_norm": 0.19547821581363678,
      "learning_rate": 4.3650000000000004e-05,
      "loss": 0.0516,
      "step": 890
    },
    {
      "epoch": 1.0285714285714285,
      "grad_norm": 0.16015152633190155,
      "learning_rate": 4.357857142857143e-05,
      "loss": 0.0502,
      "step": 900
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.19731080532073975,
      "learning_rate": 4.350714285714286e-05,
      "loss": 0.0482,
      "step": 910
    },
    {
      "epoch": 1.0514285714285714,
      "grad_norm": 0.17309032380580902,
      "learning_rate": 4.343571428571428e-05,
      "loss": 0.0461,
      "step": 920
    },
    {
      "epoch": 1.062857142857143,
      "grad_norm": 0.17293015122413635,
      "learning_rate": 4.336428571428572e-05,
      "loss": 0.0521,
      "step": 930
    },
    {
      "epoch": 1.0742857142857143,
      "grad_norm": 0.1969359666109085,
      "learning_rate": 4.3292857142857144e-05,
      "loss": 0.0499,
      "step": 940
    },
    {
      "epoch": 1.0857142857142856,
      "grad_norm": 0.14096097648143768,
      "learning_rate": 4.3221428571428575e-05,
      "loss": 0.0559,
      "step": 950
    },
    {
      "epoch": 1.0971428571428572,
      "grad_norm": 0.24003499746322632,
      "learning_rate": 4.315e-05,
      "loss": 0.0541,
      "step": 960
    },
    {
      "epoch": 1.1085714285714285,
      "grad_norm": 0.1646154224872589,
      "learning_rate": 4.307857142857143e-05,
      "loss": 0.0554,
      "step": 970
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.15991230309009552,
      "learning_rate": 4.300714285714286e-05,
      "loss": 0.052,
      "step": 980
    },
    {
      "epoch": 1.1314285714285715,
      "grad_norm": 0.1888715773820877,
      "learning_rate": 4.293571428571429e-05,
      "loss": 0.0478,
      "step": 990
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.15449759364128113,
      "learning_rate": 4.2864285714285715e-05,
      "loss": 0.0496,
      "step": 1000
    },
    {
      "epoch": 1.1542857142857144,
      "grad_norm": 0.12944145500659943,
      "learning_rate": 4.2792857142857145e-05,
      "loss": 0.0482,
      "step": 1010
    },
    {
      "epoch": 1.1657142857142857,
      "grad_norm": 0.14871393144130707,
      "learning_rate": 4.2721428571428576e-05,
      "loss": 0.0512,
      "step": 1020
    },
    {
      "epoch": 1.177142857142857,
      "grad_norm": 0.17820103466510773,
      "learning_rate": 4.265e-05,
      "loss": 0.0475,
      "step": 1030
    },
    {
      "epoch": 1.1885714285714286,
      "grad_norm": 0.1732417643070221,
      "learning_rate": 4.257857142857143e-05,
      "loss": 0.0503,
      "step": 1040
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.19619877636432648,
      "learning_rate": 4.2507142857142855e-05,
      "loss": 0.0481,
      "step": 1050
    },
    {
      "epoch": 1.2114285714285715,
      "grad_norm": 0.35855549573898315,
      "learning_rate": 4.243571428571429e-05,
      "loss": 0.0525,
      "step": 1060
    },
    {
      "epoch": 1.2228571428571429,
      "grad_norm": 0.22570298612117767,
      "learning_rate": 4.2364285714285716e-05,
      "loss": 0.0507,
      "step": 1070
    },
    {
      "epoch": 1.2342857142857142,
      "grad_norm": 0.26217854022979736,
      "learning_rate": 4.229285714285715e-05,
      "loss": 0.0506,
      "step": 1080
    },
    {
      "epoch": 1.2457142857142858,
      "grad_norm": 0.19021452963352203,
      "learning_rate": 4.222142857142857e-05,
      "loss": 0.0487,
      "step": 1090
    },
    {
      "epoch": 1.2571428571428571,
      "grad_norm": 0.2223762720823288,
      "learning_rate": 4.215e-05,
      "loss": 0.0491,
      "step": 1100
    },
    {
      "epoch": 1.2685714285714287,
      "grad_norm": 0.13785573840141296,
      "learning_rate": 4.2078571428571425e-05,
      "loss": 0.0462,
      "step": 1110
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.15734604001045227,
      "learning_rate": 4.200714285714286e-05,
      "loss": 0.0448,
      "step": 1120
    },
    {
      "epoch": 1.2914285714285714,
      "grad_norm": 0.5288769006729126,
      "learning_rate": 4.193571428571429e-05,
      "loss": 0.0501,
      "step": 1130
    },
    {
      "epoch": 1.302857142857143,
      "grad_norm": 0.17690952122211456,
      "learning_rate": 4.186428571428572e-05,
      "loss": 0.0453,
      "step": 1140
    },
    {
      "epoch": 1.3142857142857143,
      "grad_norm": 0.17811287939548492,
      "learning_rate": 4.179285714285715e-05,
      "loss": 0.0478,
      "step": 1150
    },
    {
      "epoch": 1.3257142857142856,
      "grad_norm": 0.19255459308624268,
      "learning_rate": 4.172142857142857e-05,
      "loss": 0.0477,
      "step": 1160
    },
    {
      "epoch": 1.3371428571428572,
      "grad_norm": 0.16269567608833313,
      "learning_rate": 4.165e-05,
      "loss": 0.0438,
      "step": 1170
    },
    {
      "epoch": 1.3485714285714285,
      "grad_norm": 0.16998130083084106,
      "learning_rate": 4.157857142857143e-05,
      "loss": 0.046,
      "step": 1180
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 0.21180035173892975,
      "learning_rate": 4.1507142857142864e-05,
      "loss": 0.0443,
      "step": 1190
    },
    {
      "epoch": 1.3714285714285714,
      "grad_norm": 0.24145734310150146,
      "learning_rate": 4.143571428571429e-05,
      "loss": 0.0438,
      "step": 1200
    },
    {
      "epoch": 1.3828571428571428,
      "grad_norm": 0.13968069851398468,
      "learning_rate": 4.136428571428572e-05,
      "loss": 0.0471,
      "step": 1210
    },
    {
      "epoch": 1.3942857142857144,
      "grad_norm": 0.16085991263389587,
      "learning_rate": 4.129285714285714e-05,
      "loss": 0.0472,
      "step": 1220
    },
    {
      "epoch": 1.4057142857142857,
      "grad_norm": 0.3225364685058594,
      "learning_rate": 4.1221428571428573e-05,
      "loss": 0.044,
      "step": 1230
    },
    {
      "epoch": 1.4171428571428573,
      "grad_norm": 0.11656186729669571,
      "learning_rate": 4.115e-05,
      "loss": 0.0447,
      "step": 1240
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.16806724667549133,
      "learning_rate": 4.1078571428571435e-05,
      "loss": 0.0441,
      "step": 1250
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.22123603522777557,
      "learning_rate": 4.100714285714286e-05,
      "loss": 0.043,
      "step": 1260
    },
    {
      "epoch": 1.4514285714285715,
      "grad_norm": 0.14784376323223114,
      "learning_rate": 4.093571428571429e-05,
      "loss": 0.0439,
      "step": 1270
    },
    {
      "epoch": 1.4628571428571429,
      "grad_norm": 0.1381123811006546,
      "learning_rate": 4.0864285714285713e-05,
      "loss": 0.0409,
      "step": 1280
    },
    {
      "epoch": 1.4742857142857142,
      "grad_norm": 0.14749744534492493,
      "learning_rate": 4.0792857142857144e-05,
      "loss": 0.0412,
      "step": 1290
    },
    {
      "epoch": 1.4857142857142858,
      "grad_norm": 0.16648408770561218,
      "learning_rate": 4.0721428571428575e-05,
      "loss": 0.0429,
      "step": 1300
    },
    {
      "epoch": 1.497142857142857,
      "grad_norm": 0.11807863414287567,
      "learning_rate": 4.065e-05,
      "loss": 0.0415,
      "step": 1310
    },
    {
      "epoch": 1.5085714285714285,
      "grad_norm": 0.1766592413187027,
      "learning_rate": 4.057857142857143e-05,
      "loss": 0.0424,
      "step": 1320
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.17224718630313873,
      "learning_rate": 4.050714285714286e-05,
      "loss": 0.0443,
      "step": 1330
    },
    {
      "epoch": 1.5314285714285716,
      "grad_norm": 0.25334063172340393,
      "learning_rate": 4.043571428571429e-05,
      "loss": 0.0444,
      "step": 1340
    },
    {
      "epoch": 1.5428571428571427,
      "grad_norm": 0.1791314333677292,
      "learning_rate": 4.0364285714285715e-05,
      "loss": 0.0429,
      "step": 1350
    },
    {
      "epoch": 1.5542857142857143,
      "grad_norm": 0.1268022358417511,
      "learning_rate": 4.0292857142857146e-05,
      "loss": 0.0429,
      "step": 1360
    },
    {
      "epoch": 1.5657142857142858,
      "grad_norm": 0.1954212486743927,
      "learning_rate": 4.022142857142857e-05,
      "loss": 0.0455,
      "step": 1370
    },
    {
      "epoch": 1.5771428571428572,
      "grad_norm": 0.17540448904037476,
      "learning_rate": 4.015000000000001e-05,
      "loss": 0.0459,
      "step": 1380
    },
    {
      "epoch": 1.5885714285714285,
      "grad_norm": 0.12704719603061676,
      "learning_rate": 4.007857142857143e-05,
      "loss": 0.0429,
      "step": 1390
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.2572875916957855,
      "learning_rate": 4.000714285714286e-05,
      "loss": 0.0412,
      "step": 1400
    },
    {
      "epoch": 1.6114285714285714,
      "grad_norm": 0.2871057689189911,
      "learning_rate": 3.9935714285714285e-05,
      "loss": 0.0424,
      "step": 1410
    },
    {
      "epoch": 1.6228571428571428,
      "grad_norm": 0.14216271042823792,
      "learning_rate": 3.9864285714285716e-05,
      "loss": 0.041,
      "step": 1420
    },
    {
      "epoch": 1.6342857142857143,
      "grad_norm": 0.2252836674451828,
      "learning_rate": 3.979285714285714e-05,
      "loss": 0.0421,
      "step": 1430
    },
    {
      "epoch": 1.6457142857142857,
      "grad_norm": 0.24771438539028168,
      "learning_rate": 3.972142857142858e-05,
      "loss": 0.0434,
      "step": 1440
    },
    {
      "epoch": 1.657142857142857,
      "grad_norm": 0.15339438617229462,
      "learning_rate": 3.965e-05,
      "loss": 0.0452,
      "step": 1450
    },
    {
      "epoch": 1.6685714285714286,
      "grad_norm": 0.13443227112293243,
      "learning_rate": 3.957857142857143e-05,
      "loss": 0.0401,
      "step": 1460
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.15537743270397186,
      "learning_rate": 3.9507142857142856e-05,
      "loss": 0.0414,
      "step": 1470
    },
    {
      "epoch": 1.6914285714285713,
      "grad_norm": 0.14956708252429962,
      "learning_rate": 3.943571428571429e-05,
      "loss": 0.0423,
      "step": 1480
    },
    {
      "epoch": 1.7028571428571428,
      "grad_norm": 0.13108207285404205,
      "learning_rate": 3.936428571428572e-05,
      "loss": 0.0385,
      "step": 1490
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.2588536739349365,
      "learning_rate": 3.929285714285714e-05,
      "loss": 0.0449,
      "step": 1500
    },
    {
      "epoch": 1.7257142857142858,
      "grad_norm": 0.13190333545207977,
      "learning_rate": 3.922142857142858e-05,
      "loss": 0.0415,
      "step": 1510
    },
    {
      "epoch": 1.737142857142857,
      "grad_norm": 0.15415117144584656,
      "learning_rate": 3.915e-05,
      "loss": 0.0404,
      "step": 1520
    },
    {
      "epoch": 1.7485714285714287,
      "grad_norm": 0.22507691383361816,
      "learning_rate": 3.9078571428571434e-05,
      "loss": 0.0412,
      "step": 1530
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.14862731099128723,
      "learning_rate": 3.900714285714286e-05,
      "loss": 0.0412,
      "step": 1540
    },
    {
      "epoch": 1.7714285714285714,
      "grad_norm": 0.19868846237659454,
      "learning_rate": 3.893571428571429e-05,
      "loss": 0.0413,
      "step": 1550
    },
    {
      "epoch": 1.782857142857143,
      "grad_norm": 0.11937547475099564,
      "learning_rate": 3.886428571428571e-05,
      "loss": 0.0416,
      "step": 1560
    },
    {
      "epoch": 1.7942857142857143,
      "grad_norm": 0.17745395004749298,
      "learning_rate": 3.879285714285715e-05,
      "loss": 0.0412,
      "step": 1570
    },
    {
      "epoch": 1.8057142857142856,
      "grad_norm": 0.11944162100553513,
      "learning_rate": 3.8721428571428574e-05,
      "loss": 0.0382,
      "step": 1580
    },
    {
      "epoch": 1.8171428571428572,
      "grad_norm": 0.3294443190097809,
      "learning_rate": 3.8650000000000004e-05,
      "loss": 0.0409,
      "step": 1590
    },
    {
      "epoch": 1.8285714285714287,
      "grad_norm": 0.19752420485019684,
      "learning_rate": 3.857857142857143e-05,
      "loss": 0.0386,
      "step": 1600
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 0.18311238288879395,
      "learning_rate": 3.850714285714286e-05,
      "loss": 0.04,
      "step": 1610
    },
    {
      "epoch": 1.8514285714285714,
      "grad_norm": 0.12520070374011993,
      "learning_rate": 3.843571428571428e-05,
      "loss": 0.0393,
      "step": 1620
    },
    {
      "epoch": 1.862857142857143,
      "grad_norm": 0.15205688774585724,
      "learning_rate": 3.8364285714285714e-05,
      "loss": 0.0385,
      "step": 1630
    },
    {
      "epoch": 1.8742857142857143,
      "grad_norm": 0.22413566708564758,
      "learning_rate": 3.8292857142857144e-05,
      "loss": 0.0394,
      "step": 1640
    },
    {
      "epoch": 1.8857142857142857,
      "grad_norm": 0.17256265878677368,
      "learning_rate": 3.8221428571428575e-05,
      "loss": 0.0364,
      "step": 1650
    },
    {
      "epoch": 1.8971428571428572,
      "grad_norm": 0.20736004412174225,
      "learning_rate": 3.8150000000000006e-05,
      "loss": 0.039,
      "step": 1660
    },
    {
      "epoch": 1.9085714285714286,
      "grad_norm": 0.13663062453269958,
      "learning_rate": 3.807857142857143e-05,
      "loss": 0.0387,
      "step": 1670
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.14069513976573944,
      "learning_rate": 3.800714285714286e-05,
      "loss": 0.0381,
      "step": 1680
    },
    {
      "epoch": 1.9314285714285715,
      "grad_norm": 0.11960676312446594,
      "learning_rate": 3.7935714285714284e-05,
      "loss": 0.0383,
      "step": 1690
    },
    {
      "epoch": 1.9428571428571428,
      "grad_norm": 0.14744821190834045,
      "learning_rate": 3.786428571428572e-05,
      "loss": 0.0375,
      "step": 1700
    },
    {
      "epoch": 1.9542857142857142,
      "grad_norm": 0.10939270257949829,
      "learning_rate": 3.7792857142857146e-05,
      "loss": 0.0393,
      "step": 1710
    },
    {
      "epoch": 1.9657142857142857,
      "grad_norm": 0.1914082169532776,
      "learning_rate": 3.7721428571428576e-05,
      "loss": 0.0378,
      "step": 1720
    },
    {
      "epoch": 1.977142857142857,
      "grad_norm": 0.12824489176273346,
      "learning_rate": 3.765e-05,
      "loss": 0.0375,
      "step": 1730
    },
    {
      "epoch": 1.9885714285714284,
      "grad_norm": 0.1403360813856125,
      "learning_rate": 3.757857142857143e-05,
      "loss": 0.0375,
      "step": 1740
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.12333709001541138,
      "learning_rate": 3.7507142857142855e-05,
      "loss": 0.0349,
      "step": 1750
    },
    {
      "epoch": 2.0,
      "eval_bleu": 0.07,
      "eval_exact_match": 0.0,
      "eval_loss": 0.030519695952534676,
      "eval_runtime": 343.7355,
      "eval_samples_per_second": 17.455,
      "eval_steps_per_second": 1.091,
      "step": 1750
    },
    {
      "epoch": 2.0114285714285716,
      "grad_norm": 0.20832888782024384,
      "learning_rate": 3.7435714285714286e-05,
      "loss": 0.0387,
      "step": 1760
    },
    {
      "epoch": 2.0228571428571427,
      "grad_norm": 0.2904718518257141,
      "learning_rate": 3.7364285714285716e-05,
      "loss": 0.0406,
      "step": 1770
    },
    {
      "epoch": 2.0342857142857143,
      "grad_norm": 0.18017873167991638,
      "learning_rate": 3.729285714285715e-05,
      "loss": 0.0382,
      "step": 1780
    },
    {
      "epoch": 2.045714285714286,
      "grad_norm": 0.17356900870800018,
      "learning_rate": 3.722142857142857e-05,
      "loss": 0.0372,
      "step": 1790
    },
    {
      "epoch": 2.057142857142857,
      "grad_norm": 0.11434105038642883,
      "learning_rate": 3.715e-05,
      "loss": 0.039,
      "step": 1800
    },
    {
      "epoch": 2.0685714285714285,
      "grad_norm": 0.14525334537029266,
      "learning_rate": 3.707857142857143e-05,
      "loss": 0.0389,
      "step": 1810
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.12170791625976562,
      "learning_rate": 3.7007142857142856e-05,
      "loss": 0.0362,
      "step": 1820
    },
    {
      "epoch": 2.0914285714285716,
      "grad_norm": 0.12874464690685272,
      "learning_rate": 3.693571428571429e-05,
      "loss": 0.0377,
      "step": 1830
    },
    {
      "epoch": 2.1028571428571428,
      "grad_norm": 0.13844266533851624,
      "learning_rate": 3.686428571428572e-05,
      "loss": 0.0387,
      "step": 1840
    },
    {
      "epoch": 2.1142857142857143,
      "grad_norm": 0.12960191071033478,
      "learning_rate": 3.679285714285715e-05,
      "loss": 0.0384,
      "step": 1850
    },
    {
      "epoch": 2.125714285714286,
      "grad_norm": 0.15162068605422974,
      "learning_rate": 3.672142857142857e-05,
      "loss": 0.0375,
      "step": 1860
    },
    {
      "epoch": 2.137142857142857,
      "grad_norm": 0.2595318555831909,
      "learning_rate": 3.665e-05,
      "loss": 0.0389,
      "step": 1870
    },
    {
      "epoch": 2.1485714285714286,
      "grad_norm": 0.19488683342933655,
      "learning_rate": 3.657857142857143e-05,
      "loss": 0.0405,
      "step": 1880
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.17449724674224854,
      "learning_rate": 3.650714285714286e-05,
      "loss": 0.0376,
      "step": 1890
    },
    {
      "epoch": 2.1714285714285713,
      "grad_norm": 0.17534580826759338,
      "learning_rate": 3.643571428571429e-05,
      "loss": 0.0356,
      "step": 1900
    },
    {
      "epoch": 2.182857142857143,
      "grad_norm": 0.11023643612861633,
      "learning_rate": 3.636428571428572e-05,
      "loss": 0.0363,
      "step": 1910
    },
    {
      "epoch": 2.1942857142857144,
      "grad_norm": 0.1318758726119995,
      "learning_rate": 3.629285714285714e-05,
      "loss": 0.0363,
      "step": 1920
    },
    {
      "epoch": 2.2057142857142855,
      "grad_norm": 0.13930299878120422,
      "learning_rate": 3.6221428571428574e-05,
      "loss": 0.0355,
      "step": 1930
    },
    {
      "epoch": 2.217142857142857,
      "grad_norm": 0.13110798597335815,
      "learning_rate": 3.615e-05,
      "loss": 0.0366,
      "step": 1940
    },
    {
      "epoch": 2.2285714285714286,
      "grad_norm": 0.1405479907989502,
      "learning_rate": 3.607857142857143e-05,
      "loss": 0.0369,
      "step": 1950
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.15569846332073212,
      "learning_rate": 3.600714285714286e-05,
      "loss": 0.0377,
      "step": 1960
    },
    {
      "epoch": 2.2514285714285713,
      "grad_norm": 0.11618765443563461,
      "learning_rate": 3.593571428571429e-05,
      "loss": 0.0343,
      "step": 1970
    },
    {
      "epoch": 2.262857142857143,
      "grad_norm": 0.13145925104618073,
      "learning_rate": 3.5864285714285714e-05,
      "loss": 0.0344,
      "step": 1980
    },
    {
      "epoch": 2.2742857142857145,
      "grad_norm": 0.10836513340473175,
      "learning_rate": 3.5792857142857145e-05,
      "loss": 0.0356,
      "step": 1990
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.12755240499973297,
      "learning_rate": 3.5721428571428575e-05,
      "loss": 0.0369,
      "step": 2000
    },
    {
      "epoch": 2.297142857142857,
      "grad_norm": 0.11500225961208344,
      "learning_rate": 3.565e-05,
      "loss": 0.0364,
      "step": 2010
    },
    {
      "epoch": 2.3085714285714287,
      "grad_norm": 0.1484687179327011,
      "learning_rate": 3.557857142857143e-05,
      "loss": 0.037,
      "step": 2020
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.13004055619239807,
      "learning_rate": 3.550714285714286e-05,
      "loss": 0.036,
      "step": 2030
    },
    {
      "epoch": 2.3314285714285714,
      "grad_norm": 0.1361243575811386,
      "learning_rate": 3.543571428571429e-05,
      "loss": 0.0374,
      "step": 2040
    },
    {
      "epoch": 2.342857142857143,
      "grad_norm": 0.10585059970617294,
      "learning_rate": 3.5364285714285715e-05,
      "loss": 0.0368,
      "step": 2050
    },
    {
      "epoch": 2.354285714285714,
      "grad_norm": 0.0975913405418396,
      "learning_rate": 3.5292857142857146e-05,
      "loss": 0.035,
      "step": 2060
    },
    {
      "epoch": 2.3657142857142857,
      "grad_norm": 0.13559779524803162,
      "learning_rate": 3.522142857142857e-05,
      "loss": 0.034,
      "step": 2070
    },
    {
      "epoch": 2.3771428571428572,
      "grad_norm": 0.11691150814294815,
      "learning_rate": 3.515e-05,
      "loss": 0.0361,
      "step": 2080
    },
    {
      "epoch": 2.388571428571429,
      "grad_norm": 0.09625136852264404,
      "learning_rate": 3.507857142857143e-05,
      "loss": 0.0356,
      "step": 2090
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.15719136595726013,
      "learning_rate": 3.500714285714286e-05,
      "loss": 0.036,
      "step": 2100
    },
    {
      "epoch": 2.4114285714285715,
      "grad_norm": 0.2051057517528534,
      "learning_rate": 3.4935714285714286e-05,
      "loss": 0.0356,
      "step": 2110
    },
    {
      "epoch": 2.422857142857143,
      "grad_norm": 0.13482578098773956,
      "learning_rate": 3.486428571428572e-05,
      "loss": 0.0349,
      "step": 2120
    },
    {
      "epoch": 2.434285714285714,
      "grad_norm": 0.13397566974163055,
      "learning_rate": 3.479285714285714e-05,
      "loss": 0.0364,
      "step": 2130
    },
    {
      "epoch": 2.4457142857142857,
      "grad_norm": 0.1820402443408966,
      "learning_rate": 3.472142857142857e-05,
      "loss": 0.0347,
      "step": 2140
    },
    {
      "epoch": 2.4571428571428573,
      "grad_norm": 0.13529136776924133,
      "learning_rate": 3.465e-05,
      "loss": 0.0364,
      "step": 2150
    },
    {
      "epoch": 2.4685714285714284,
      "grad_norm": 0.08887185156345367,
      "learning_rate": 3.457857142857143e-05,
      "loss": 0.0361,
      "step": 2160
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.12572938203811646,
      "learning_rate": 3.4507142857142863e-05,
      "loss": 0.0369,
      "step": 2170
    },
    {
      "epoch": 2.4914285714285715,
      "grad_norm": 0.10877826064825058,
      "learning_rate": 3.443571428571429e-05,
      "loss": 0.0361,
      "step": 2180
    },
    {
      "epoch": 2.5028571428571427,
      "grad_norm": 0.11405471712350845,
      "learning_rate": 3.436428571428572e-05,
      "loss": 0.0339,
      "step": 2190
    },
    {
      "epoch": 2.5142857142857142,
      "grad_norm": 0.12478823214769363,
      "learning_rate": 3.429285714285714e-05,
      "loss": 0.0386,
      "step": 2200
    },
    {
      "epoch": 2.525714285714286,
      "grad_norm": 0.1045498475432396,
      "learning_rate": 3.422142857142857e-05,
      "loss": 0.036,
      "step": 2210
    },
    {
      "epoch": 2.5371428571428574,
      "grad_norm": 0.11122453212738037,
      "learning_rate": 3.415e-05,
      "loss": 0.0336,
      "step": 2220
    },
    {
      "epoch": 2.5485714285714285,
      "grad_norm": 0.1618603616952896,
      "learning_rate": 3.4078571428571434e-05,
      "loss": 0.0352,
      "step": 2230
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.10025645792484283,
      "learning_rate": 3.400714285714286e-05,
      "loss": 0.0339,
      "step": 2240
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 0.12444077432155609,
      "learning_rate": 3.393571428571429e-05,
      "loss": 0.033,
      "step": 2250
    },
    {
      "epoch": 2.5828571428571427,
      "grad_norm": 0.1822001338005066,
      "learning_rate": 3.386428571428571e-05,
      "loss": 0.035,
      "step": 2260
    },
    {
      "epoch": 2.5942857142857143,
      "grad_norm": 0.09717897325754166,
      "learning_rate": 3.379285714285714e-05,
      "loss": 0.0342,
      "step": 2270
    },
    {
      "epoch": 2.605714285714286,
      "grad_norm": 0.11690513044595718,
      "learning_rate": 3.372142857142857e-05,
      "loss": 0.0358,
      "step": 2280
    },
    {
      "epoch": 2.617142857142857,
      "grad_norm": 0.09562231600284576,
      "learning_rate": 3.3650000000000005e-05,
      "loss": 0.0334,
      "step": 2290
    },
    {
      "epoch": 2.6285714285714286,
      "grad_norm": 0.1744600236415863,
      "learning_rate": 3.357857142857143e-05,
      "loss": 0.0349,
      "step": 2300
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.2031196504831314,
      "learning_rate": 3.350714285714286e-05,
      "loss": 0.0354,
      "step": 2310
    },
    {
      "epoch": 2.6514285714285712,
      "grad_norm": 0.16061373054981232,
      "learning_rate": 3.343571428571429e-05,
      "loss": 0.0337,
      "step": 2320
    },
    {
      "epoch": 2.662857142857143,
      "grad_norm": 0.11644838005304337,
      "learning_rate": 3.3364285714285714e-05,
      "loss": 0.0345,
      "step": 2330
    },
    {
      "epoch": 2.6742857142857144,
      "grad_norm": 0.14671610295772552,
      "learning_rate": 3.3292857142857145e-05,
      "loss": 0.0335,
      "step": 2340
    },
    {
      "epoch": 2.685714285714286,
      "grad_norm": 0.09919189661741257,
      "learning_rate": 3.3221428571428575e-05,
      "loss": 0.035,
      "step": 2350
    },
    {
      "epoch": 2.697142857142857,
      "grad_norm": 0.10584178566932678,
      "learning_rate": 3.3150000000000006e-05,
      "loss": 0.0337,
      "step": 2360
    },
    {
      "epoch": 2.7085714285714286,
      "grad_norm": 0.09700863063335419,
      "learning_rate": 3.307857142857143e-05,
      "loss": 0.0362,
      "step": 2370
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.14599616825580597,
      "learning_rate": 3.300714285714286e-05,
      "loss": 0.0355,
      "step": 2380
    },
    {
      "epoch": 2.7314285714285713,
      "grad_norm": 0.11282289028167725,
      "learning_rate": 3.2935714285714285e-05,
      "loss": 0.0324,
      "step": 2390
    },
    {
      "epoch": 2.742857142857143,
      "grad_norm": 0.11761584132909775,
      "learning_rate": 3.2864285714285715e-05,
      "loss": 0.0348,
      "step": 2400
    },
    {
      "epoch": 2.7542857142857144,
      "grad_norm": 0.14849688112735748,
      "learning_rate": 3.279285714285714e-05,
      "loss": 0.0338,
      "step": 2410
    },
    {
      "epoch": 2.7657142857142856,
      "grad_norm": 0.10622404515743256,
      "learning_rate": 3.272142857142858e-05,
      "loss": 0.0327,
      "step": 2420
    },
    {
      "epoch": 2.777142857142857,
      "grad_norm": 0.14599359035491943,
      "learning_rate": 3.265e-05,
      "loss": 0.0335,
      "step": 2430
    },
    {
      "epoch": 2.7885714285714287,
      "grad_norm": 0.11975230276584625,
      "learning_rate": 3.257857142857143e-05,
      "loss": 0.033,
      "step": 2440
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.15992556512355804,
      "learning_rate": 3.2507142857142855e-05,
      "loss": 0.0356,
      "step": 2450
    },
    {
      "epoch": 2.8114285714285714,
      "grad_norm": 0.08963726460933685,
      "learning_rate": 3.2435714285714286e-05,
      "loss": 0.0341,
      "step": 2460
    },
    {
      "epoch": 2.822857142857143,
      "grad_norm": 0.10056816041469574,
      "learning_rate": 3.236428571428572e-05,
      "loss": 0.0349,
      "step": 2470
    },
    {
      "epoch": 2.8342857142857145,
      "grad_norm": 0.12423935532569885,
      "learning_rate": 3.229285714285715e-05,
      "loss": 0.0341,
      "step": 2480
    },
    {
      "epoch": 2.8457142857142856,
      "grad_norm": 0.12661360204219818,
      "learning_rate": 3.222142857142857e-05,
      "loss": 0.0339,
      "step": 2490
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.08652866631746292,
      "learning_rate": 3.215e-05,
      "loss": 0.0345,
      "step": 2500
    },
    {
      "epoch": 2.8685714285714283,
      "grad_norm": 0.10129117220640182,
      "learning_rate": 3.207857142857143e-05,
      "loss": 0.0364,
      "step": 2510
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.09667441248893738,
      "learning_rate": 3.200714285714286e-05,
      "loss": 0.0335,
      "step": 2520
    },
    {
      "epoch": 2.8914285714285715,
      "grad_norm": 0.10019873082637787,
      "learning_rate": 3.193571428571429e-05,
      "loss": 0.034,
      "step": 2530
    },
    {
      "epoch": 2.902857142857143,
      "grad_norm": 0.109789177775383,
      "learning_rate": 3.186428571428571e-05,
      "loss": 0.0325,
      "step": 2540
    },
    {
      "epoch": 2.914285714285714,
      "grad_norm": 0.13727444410324097,
      "learning_rate": 3.179285714285715e-05,
      "loss": 0.0322,
      "step": 2550
    },
    {
      "epoch": 2.9257142857142857,
      "grad_norm": 0.09896359592676163,
      "learning_rate": 3.172142857142857e-05,
      "loss": 0.0346,
      "step": 2560
    },
    {
      "epoch": 2.9371428571428573,
      "grad_norm": 0.09976238012313843,
      "learning_rate": 3.1650000000000004e-05,
      "loss": 0.0323,
      "step": 2570
    },
    {
      "epoch": 2.9485714285714284,
      "grad_norm": 0.10362710803747177,
      "learning_rate": 3.157857142857143e-05,
      "loss": 0.0347,
      "step": 2580
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.11956743150949478,
      "learning_rate": 3.150714285714286e-05,
      "loss": 0.035,
      "step": 2590
    },
    {
      "epoch": 2.9714285714285715,
      "grad_norm": 0.1660848706960678,
      "learning_rate": 3.143571428571428e-05,
      "loss": 0.0341,
      "step": 2600
    },
    {
      "epoch": 2.982857142857143,
      "grad_norm": 0.0876573994755745,
      "learning_rate": 3.136428571428572e-05,
      "loss": 0.0349,
      "step": 2610
    },
    {
      "epoch": 2.994285714285714,
      "grad_norm": 0.10691452026367188,
      "learning_rate": 3.1292857142857144e-05,
      "loss": 0.0349,
      "step": 2620
    },
    {
      "epoch": 3.0,
      "eval_bleu": 0.07,
      "eval_exact_match": 0.0,
      "eval_loss": 0.029123220592737198,
      "eval_runtime": 344.2459,
      "eval_samples_per_second": 17.429,
      "eval_steps_per_second": 1.089,
      "step": 2625
    },
    {
      "epoch": 3.005714285714286,
      "grad_norm": 0.14447887241840363,
      "learning_rate": 3.1221428571428574e-05,
      "loss": 0.0358,
      "step": 2630
    },
    {
      "epoch": 3.0171428571428573,
      "grad_norm": 0.0858982652425766,
      "learning_rate": 3.115e-05,
      "loss": 0.0323,
      "step": 2640
    },
    {
      "epoch": 3.0285714285714285,
      "grad_norm": 0.08900221437215805,
      "learning_rate": 3.107857142857143e-05,
      "loss": 0.0334,
      "step": 2650
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.11133663356304169,
      "learning_rate": 3.100714285714286e-05,
      "loss": 0.0338,
      "step": 2660
    },
    {
      "epoch": 3.0514285714285716,
      "grad_norm": 0.08960743993520737,
      "learning_rate": 3.0935714285714284e-05,
      "loss": 0.0329,
      "step": 2670
    },
    {
      "epoch": 3.0628571428571427,
      "grad_norm": 0.1239769235253334,
      "learning_rate": 3.086428571428572e-05,
      "loss": 0.0326,
      "step": 2680
    },
    {
      "epoch": 3.0742857142857143,
      "grad_norm": 0.1012355387210846,
      "learning_rate": 3.0792857142857145e-05,
      "loss": 0.0346,
      "step": 2690
    },
    {
      "epoch": 3.085714285714286,
      "grad_norm": 0.08918215334415436,
      "learning_rate": 3.0721428571428576e-05,
      "loss": 0.0332,
      "step": 2700
    },
    {
      "epoch": 3.097142857142857,
      "grad_norm": 0.11924552172422409,
      "learning_rate": 3.065e-05,
      "loss": 0.0325,
      "step": 2710
    },
    {
      "epoch": 3.1085714285714285,
      "grad_norm": 0.08870343118906021,
      "learning_rate": 3.057857142857143e-05,
      "loss": 0.0348,
      "step": 2720
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.10869447886943817,
      "learning_rate": 3.0507142857142858e-05,
      "loss": 0.0316,
      "step": 2730
    },
    {
      "epoch": 3.1314285714285712,
      "grad_norm": 0.15603742003440857,
      "learning_rate": 3.043571428571429e-05,
      "loss": 0.0319,
      "step": 2740
    },
    {
      "epoch": 3.142857142857143,
      "grad_norm": 0.0926317349076271,
      "learning_rate": 3.0364285714285716e-05,
      "loss": 0.0337,
      "step": 2750
    },
    {
      "epoch": 3.1542857142857144,
      "grad_norm": 0.1345146745443344,
      "learning_rate": 3.0292857142857146e-05,
      "loss": 0.0355,
      "step": 2760
    },
    {
      "epoch": 3.1657142857142855,
      "grad_norm": 0.1272844821214676,
      "learning_rate": 3.0221428571428574e-05,
      "loss": 0.0357,
      "step": 2770
    },
    {
      "epoch": 3.177142857142857,
      "grad_norm": 0.1036539152264595,
      "learning_rate": 3.015e-05,
      "loss": 0.0341,
      "step": 2780
    },
    {
      "epoch": 3.1885714285714286,
      "grad_norm": 0.1159033477306366,
      "learning_rate": 3.007857142857143e-05,
      "loss": 0.0332,
      "step": 2790
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.15586401522159576,
      "learning_rate": 3.0007142857142856e-05,
      "loss": 0.0326,
      "step": 2800
    },
    {
      "epoch": 3.2114285714285713,
      "grad_norm": 0.11249198019504547,
      "learning_rate": 2.993571428571429e-05,
      "loss": 0.0345,
      "step": 2810
    },
    {
      "epoch": 3.222857142857143,
      "grad_norm": 0.08775516599416733,
      "learning_rate": 2.9864285714285717e-05,
      "loss": 0.0328,
      "step": 2820
    },
    {
      "epoch": 3.2342857142857144,
      "grad_norm": 0.1125701293349266,
      "learning_rate": 2.9792857142857144e-05,
      "loss": 0.0335,
      "step": 2830
    },
    {
      "epoch": 3.2457142857142856,
      "grad_norm": 0.09027904272079468,
      "learning_rate": 2.9721428571428572e-05,
      "loss": 0.0322,
      "step": 2840
    },
    {
      "epoch": 3.257142857142857,
      "grad_norm": 0.09299852699041367,
      "learning_rate": 2.965e-05,
      "loss": 0.0346,
      "step": 2850
    },
    {
      "epoch": 3.2685714285714287,
      "grad_norm": 0.08893876522779465,
      "learning_rate": 2.9578571428571426e-05,
      "loss": 0.0326,
      "step": 2860
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 0.11996601521968842,
      "learning_rate": 2.950714285714286e-05,
      "loss": 0.0339,
      "step": 2870
    },
    {
      "epoch": 3.2914285714285714,
      "grad_norm": 0.10866489261388779,
      "learning_rate": 2.9435714285714288e-05,
      "loss": 0.0351,
      "step": 2880
    },
    {
      "epoch": 3.302857142857143,
      "grad_norm": 0.09160767495632172,
      "learning_rate": 2.9364285714285715e-05,
      "loss": 0.0327,
      "step": 2890
    },
    {
      "epoch": 3.314285714285714,
      "grad_norm": 0.12444985657930374,
      "learning_rate": 2.9292857142857146e-05,
      "loss": 0.0355,
      "step": 2900
    },
    {
      "epoch": 3.3257142857142856,
      "grad_norm": 0.07536906003952026,
      "learning_rate": 2.9221428571428573e-05,
      "loss": 0.0336,
      "step": 2910
    },
    {
      "epoch": 3.337142857142857,
      "grad_norm": 0.08127142488956451,
      "learning_rate": 2.915e-05,
      "loss": 0.033,
      "step": 2920
    },
    {
      "epoch": 3.3485714285714288,
      "grad_norm": 0.0742766261100769,
      "learning_rate": 2.9078571428571428e-05,
      "loss": 0.0328,
      "step": 2930
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.08692529797554016,
      "learning_rate": 2.9007142857142862e-05,
      "loss": 0.0329,
      "step": 2940
    },
    {
      "epoch": 3.3714285714285714,
      "grad_norm": 0.08498033881187439,
      "learning_rate": 2.893571428571429e-05,
      "loss": 0.0329,
      "step": 2950
    },
    {
      "epoch": 3.382857142857143,
      "grad_norm": 0.07162804156541824,
      "learning_rate": 2.8864285714285716e-05,
      "loss": 0.0356,
      "step": 2960
    },
    {
      "epoch": 3.394285714285714,
      "grad_norm": 0.12961073219776154,
      "learning_rate": 2.8792857142857144e-05,
      "loss": 0.0335,
      "step": 2970
    },
    {
      "epoch": 3.4057142857142857,
      "grad_norm": 0.09309065341949463,
      "learning_rate": 2.872142857142857e-05,
      "loss": 0.0312,
      "step": 2980
    },
    {
      "epoch": 3.4171428571428573,
      "grad_norm": 0.09326250106096268,
      "learning_rate": 2.865e-05,
      "loss": 0.0334,
      "step": 2990
    },
    {
      "epoch": 3.4285714285714284,
      "grad_norm": 0.23755627870559692,
      "learning_rate": 2.8578571428571433e-05,
      "loss": 0.0339,
      "step": 3000
    },
    {
      "epoch": 3.44,
      "grad_norm": 0.32597196102142334,
      "learning_rate": 2.850714285714286e-05,
      "loss": 0.0324,
      "step": 3010
    },
    {
      "epoch": 3.4514285714285715,
      "grad_norm": 0.1102445125579834,
      "learning_rate": 2.8435714285714287e-05,
      "loss": 0.0317,
      "step": 3020
    },
    {
      "epoch": 3.4628571428571426,
      "grad_norm": 0.09068254381418228,
      "learning_rate": 2.8364285714285714e-05,
      "loss": 0.0336,
      "step": 3030
    },
    {
      "epoch": 3.474285714285714,
      "grad_norm": 0.10059994459152222,
      "learning_rate": 2.8292857142857142e-05,
      "loss": 0.0337,
      "step": 3040
    },
    {
      "epoch": 3.4857142857142858,
      "grad_norm": 0.08563591539859772,
      "learning_rate": 2.8221428571428573e-05,
      "loss": 0.0339,
      "step": 3050
    },
    {
      "epoch": 3.4971428571428573,
      "grad_norm": 0.23142589628696442,
      "learning_rate": 2.815e-05,
      "loss": 0.0331,
      "step": 3060
    },
    {
      "epoch": 3.5085714285714285,
      "grad_norm": 0.10148648172616959,
      "learning_rate": 2.807857142857143e-05,
      "loss": 0.0324,
      "step": 3070
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.10303816944360733,
      "learning_rate": 2.800714285714286e-05,
      "loss": 0.0342,
      "step": 3080
    },
    {
      "epoch": 3.5314285714285716,
      "grad_norm": 0.1418079286813736,
      "learning_rate": 2.793571428571429e-05,
      "loss": 0.0335,
      "step": 3090
    },
    {
      "epoch": 3.5428571428571427,
      "grad_norm": 0.10841654986143112,
      "learning_rate": 2.7864285714285716e-05,
      "loss": 0.0327,
      "step": 3100
    },
    {
      "epoch": 3.5542857142857143,
      "grad_norm": 0.07732987403869629,
      "learning_rate": 2.7792857142857143e-05,
      "loss": 0.0312,
      "step": 3110
    },
    {
      "epoch": 3.565714285714286,
      "grad_norm": 0.08883870393037796,
      "learning_rate": 2.772142857142857e-05,
      "loss": 0.0318,
      "step": 3120
    },
    {
      "epoch": 3.5771428571428574,
      "grad_norm": 0.08147554099559784,
      "learning_rate": 2.7650000000000005e-05,
      "loss": 0.033,
      "step": 3130
    },
    {
      "epoch": 3.5885714285714285,
      "grad_norm": 0.14047172665596008,
      "learning_rate": 2.7578571428571432e-05,
      "loss": 0.033,
      "step": 3140
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.0935438871383667,
      "learning_rate": 2.750714285714286e-05,
      "loss": 0.0314,
      "step": 3150
    },
    {
      "epoch": 3.611428571428571,
      "grad_norm": 0.08656948059797287,
      "learning_rate": 2.7435714285714287e-05,
      "loss": 0.0317,
      "step": 3160
    },
    {
      "epoch": 3.6228571428571428,
      "grad_norm": 0.07801087200641632,
      "learning_rate": 2.7364285714285714e-05,
      "loss": 0.031,
      "step": 3170
    },
    {
      "epoch": 3.6342857142857143,
      "grad_norm": 0.10883987694978714,
      "learning_rate": 2.729285714285714e-05,
      "loss": 0.031,
      "step": 3180
    },
    {
      "epoch": 3.645714285714286,
      "grad_norm": 0.1345929652452469,
      "learning_rate": 2.7221428571428575e-05,
      "loss": 0.033,
      "step": 3190
    },
    {
      "epoch": 3.657142857142857,
      "grad_norm": 0.07864482700824738,
      "learning_rate": 2.7150000000000003e-05,
      "loss": 0.0332,
      "step": 3200
    },
    {
      "epoch": 3.6685714285714286,
      "grad_norm": 0.07242916524410248,
      "learning_rate": 2.707857142857143e-05,
      "loss": 0.0322,
      "step": 3210
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.30902132391929626,
      "learning_rate": 2.7007142857142857e-05,
      "loss": 0.0327,
      "step": 3220
    },
    {
      "epoch": 3.6914285714285713,
      "grad_norm": 0.13460323214530945,
      "learning_rate": 2.6935714285714288e-05,
      "loss": 0.0335,
      "step": 3230
    },
    {
      "epoch": 3.702857142857143,
      "grad_norm": 0.10607478767633438,
      "learning_rate": 2.6864285714285715e-05,
      "loss": 0.0316,
      "step": 3240
    },
    {
      "epoch": 3.7142857142857144,
      "grad_norm": 0.12986110150814056,
      "learning_rate": 2.6792857142857143e-05,
      "loss": 0.0326,
      "step": 3250
    },
    {
      "epoch": 3.725714285714286,
      "grad_norm": 0.06864488869905472,
      "learning_rate": 2.6721428571428577e-05,
      "loss": 0.0316,
      "step": 3260
    },
    {
      "epoch": 3.737142857142857,
      "grad_norm": 0.12121660262346268,
      "learning_rate": 2.6650000000000004e-05,
      "loss": 0.0319,
      "step": 3270
    },
    {
      "epoch": 3.7485714285714287,
      "grad_norm": 0.07145349681377411,
      "learning_rate": 2.657857142857143e-05,
      "loss": 0.0332,
      "step": 3280
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.12492495775222778,
      "learning_rate": 2.650714285714286e-05,
      "loss": 0.0331,
      "step": 3290
    },
    {
      "epoch": 3.7714285714285714,
      "grad_norm": 0.06998569518327713,
      "learning_rate": 2.6435714285714286e-05,
      "loss": 0.0314,
      "step": 3300
    },
    {
      "epoch": 3.782857142857143,
      "grad_norm": 0.09135474264621735,
      "learning_rate": 2.6364285714285713e-05,
      "loss": 0.0325,
      "step": 3310
    },
    {
      "epoch": 3.7942857142857145,
      "grad_norm": 0.16541588306427002,
      "learning_rate": 2.6292857142857147e-05,
      "loss": 0.0342,
      "step": 3320
    },
    {
      "epoch": 3.8057142857142856,
      "grad_norm": 0.08637196570634842,
      "learning_rate": 2.6221428571428575e-05,
      "loss": 0.0311,
      "step": 3330
    },
    {
      "epoch": 3.817142857142857,
      "grad_norm": 0.09347192198038101,
      "learning_rate": 2.6150000000000002e-05,
      "loss": 0.0332,
      "step": 3340
    },
    {
      "epoch": 3.8285714285714287,
      "grad_norm": 0.10765931010246277,
      "learning_rate": 2.607857142857143e-05,
      "loss": 0.032,
      "step": 3350
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.07229693233966827,
      "learning_rate": 2.6007142857142857e-05,
      "loss": 0.0322,
      "step": 3360
    },
    {
      "epoch": 3.8514285714285714,
      "grad_norm": 0.08713468909263611,
      "learning_rate": 2.5935714285714284e-05,
      "loss": 0.0345,
      "step": 3370
    },
    {
      "epoch": 3.862857142857143,
      "grad_norm": 0.13004758954048157,
      "learning_rate": 2.5864285714285715e-05,
      "loss": 0.0328,
      "step": 3380
    },
    {
| "epoch": 3.8742857142857146, | |
| "grad_norm": 0.0932050496339798, | |
| "learning_rate": 2.5792857142857145e-05, | |
| "loss": 0.0337, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 3.8857142857142857, | |
| "grad_norm": 0.11469905078411102, | |
| "learning_rate": 2.5721428571428573e-05, | |
| "loss": 0.0334, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 3.8971428571428572, | |
| "grad_norm": 0.06456197798252106, | |
| "learning_rate": 2.5650000000000003e-05, | |
| "loss": 0.0308, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 3.9085714285714284, | |
| "grad_norm": 0.06196196377277374, | |
| "learning_rate": 2.557857142857143e-05, | |
| "loss": 0.0311, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 0.07760277390480042, | |
| "learning_rate": 2.5507142857142858e-05, | |
| "loss": 0.0335, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.9314285714285715, | |
| "grad_norm": 0.13347361981868744, | |
| "learning_rate": 2.5435714285714285e-05, | |
| "loss": 0.0322, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.942857142857143, | |
| "grad_norm": 0.07051094621419907, | |
| "learning_rate": 2.536428571428572e-05, | |
| "loss": 0.032, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.954285714285714, | |
| "grad_norm": 0.07086417078971863, | |
| "learning_rate": 2.5292857142857147e-05, | |
| "loss": 0.0339, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.9657142857142857, | |
| "grad_norm": 0.07837912440299988, | |
| "learning_rate": 2.5221428571428574e-05, | |
| "loss": 0.0316, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.977142857142857, | |
| "grad_norm": 0.07793241739273071, | |
| "learning_rate": 2.515e-05, | |
| "loss": 0.0335, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.9885714285714284, | |
| "grad_norm": 0.10782703012228012, | |
| "learning_rate": 2.507857142857143e-05, | |
| "loss": 0.0327, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.1338101178407669, | |
| "learning_rate": 2.5007142857142856e-05, | |
| "loss": 0.0329, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_bleu": 0.07, | |
| "eval_exact_match": 0.0, | |
| "eval_loss": 0.02901034988462925, | |
| "eval_runtime": 343.4078, | |
| "eval_samples_per_second": 17.472, | |
| "eval_steps_per_second": 1.092, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 4.011428571428572, | |
| "grad_norm": 0.10343596339225769, | |
| "learning_rate": 2.4935714285714287e-05, | |
| "loss": 0.0323, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 4.022857142857143, | |
| "grad_norm": 0.089602530002594, | |
| "learning_rate": 2.4864285714285714e-05, | |
| "loss": 0.0336, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 4.034285714285715, | |
| "grad_norm": 0.06534019112586975, | |
| "learning_rate": 2.479285714285714e-05, | |
| "loss": 0.031, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 4.045714285714285, | |
| "grad_norm": 0.07555046677589417, | |
| "learning_rate": 2.4721428571428572e-05, | |
| "loss": 0.0317, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 4.057142857142857, | |
| "grad_norm": 0.07456432282924652, | |
| "learning_rate": 2.465e-05, | |
| "loss": 0.0332, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 4.0685714285714285, | |
| "grad_norm": 0.07345906645059586, | |
| "learning_rate": 2.457857142857143e-05, | |
| "loss": 0.0311, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "grad_norm": 0.07652904838323593, | |
| "learning_rate": 2.4507142857142857e-05, | |
| "loss": 0.0317, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 4.091428571428572, | |
| "grad_norm": 0.08077405393123627, | |
| "learning_rate": 2.4435714285714288e-05, | |
| "loss": 0.0351, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 4.102857142857143, | |
| "grad_norm": 0.13678324222564697, | |
| "learning_rate": 2.4364285714285716e-05, | |
| "loss": 0.032, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 4.114285714285714, | |
| "grad_norm": 0.08344317227602005, | |
| "learning_rate": 2.4292857142857146e-05, | |
| "loss": 0.0339, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 4.1257142857142854, | |
| "grad_norm": 0.07824575155973434, | |
| "learning_rate": 2.4221428571428574e-05, | |
| "loss": 0.0318, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 4.137142857142857, | |
| "grad_norm": 0.1025475487112999, | |
| "learning_rate": 2.415e-05, | |
| "loss": 0.0323, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 4.148571428571429, | |
| "grad_norm": 0.17045310139656067, | |
| "learning_rate": 2.407857142857143e-05, | |
| "loss": 0.0329, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "grad_norm": 0.07618770748376846, | |
| "learning_rate": 2.400714285714286e-05, | |
| "loss": 0.0307, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 4.171428571428572, | |
| "grad_norm": 0.09664011001586914, | |
| "learning_rate": 2.3935714285714286e-05, | |
| "loss": 0.0326, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 4.182857142857143, | |
| "grad_norm": 0.0686192512512207, | |
| "learning_rate": 2.3864285714285717e-05, | |
| "loss": 0.0317, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 4.194285714285714, | |
| "grad_norm": 0.06639493256807327, | |
| "learning_rate": 2.3792857142857144e-05, | |
| "loss": 0.0335, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 4.2057142857142855, | |
| "grad_norm": 0.12928254902362823, | |
| "learning_rate": 2.372142857142857e-05, | |
| "loss": 0.0314, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 4.217142857142857, | |
| "grad_norm": 0.17191903293132782, | |
| "learning_rate": 2.365e-05, | |
| "loss": 0.0312, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 4.228571428571429, | |
| "grad_norm": 0.0797748863697052, | |
| "learning_rate": 2.357857142857143e-05, | |
| "loss": 0.0328, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "grad_norm": 0.08241511881351471, | |
| "learning_rate": 2.3507142857142857e-05, | |
| "loss": 0.0321, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 4.251428571428572, | |
| "grad_norm": 0.12593980133533478, | |
| "learning_rate": 2.3435714285714284e-05, | |
| "loss": 0.0308, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 4.2628571428571425, | |
| "grad_norm": 0.09482847154140472, | |
| "learning_rate": 2.3364285714285715e-05, | |
| "loss": 0.0323, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 4.274285714285714, | |
| "grad_norm": 0.08406007289886475, | |
| "learning_rate": 2.3292857142857146e-05, | |
| "loss": 0.0325, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 4.285714285714286, | |
| "grad_norm": 0.10658417642116547, | |
| "learning_rate": 2.3221428571428573e-05, | |
| "loss": 0.0313, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 4.297142857142857, | |
| "grad_norm": 0.09814857691526413, | |
| "learning_rate": 2.3150000000000004e-05, | |
| "loss": 0.0327, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 4.308571428571429, | |
| "grad_norm": 0.14591078460216522, | |
| "learning_rate": 2.307857142857143e-05, | |
| "loss": 0.0336, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "grad_norm": 0.11772993952035904, | |
| "learning_rate": 2.3007142857142858e-05, | |
| "loss": 0.0318, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 4.331428571428571, | |
| "grad_norm": 0.08087225258350372, | |
| "learning_rate": 2.293571428571429e-05, | |
| "loss": 0.0313, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 4.3428571428571425, | |
| "grad_norm": 0.11614172905683517, | |
| "learning_rate": 2.2864285714285716e-05, | |
| "loss": 0.0311, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 4.354285714285714, | |
| "grad_norm": 0.10768585652112961, | |
| "learning_rate": 2.2792857142857144e-05, | |
| "loss": 0.0328, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 4.365714285714286, | |
| "grad_norm": 0.07184877246618271, | |
| "learning_rate": 2.272142857142857e-05, | |
| "loss": 0.0307, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 4.377142857142857, | |
| "grad_norm": 0.07980121672153473, | |
| "learning_rate": 2.265e-05, | |
| "loss": 0.0323, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 4.388571428571429, | |
| "grad_norm": 0.1039133369922638, | |
| "learning_rate": 2.257857142857143e-05, | |
| "loss": 0.0333, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "grad_norm": 0.12529844045639038, | |
| "learning_rate": 2.2507142857142856e-05, | |
| "loss": 0.0325, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 4.411428571428571, | |
| "grad_norm": 0.09552208334207535, | |
| "learning_rate": 2.2435714285714287e-05, | |
| "loss": 0.0321, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 4.422857142857143, | |
| "grad_norm": 0.086667500436306, | |
| "learning_rate": 2.2364285714285714e-05, | |
| "loss": 0.032, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 4.434285714285714, | |
| "grad_norm": 0.09163305163383484, | |
| "learning_rate": 2.229285714285714e-05, | |
| "loss": 0.0333, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 4.445714285714286, | |
| "grad_norm": 0.06276048719882965, | |
| "learning_rate": 2.2221428571428572e-05, | |
| "loss": 0.031, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 4.457142857142857, | |
| "grad_norm": 0.07514110952615738, | |
| "learning_rate": 2.215e-05, | |
| "loss": 0.0315, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 4.468571428571429, | |
| "grad_norm": 0.0655631348490715, | |
| "learning_rate": 2.207857142857143e-05, | |
| "loss": 0.0316, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 0.09730257093906403, | |
| "learning_rate": 2.200714285714286e-05, | |
| "loss": 0.0312, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 4.491428571428571, | |
| "grad_norm": 0.07015182077884674, | |
| "learning_rate": 2.193571428571429e-05, | |
| "loss": 0.0329, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 4.502857142857143, | |
| "grad_norm": 0.09095067530870438, | |
| "learning_rate": 2.1864285714285716e-05, | |
| "loss": 0.0333, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 4.514285714285714, | |
| "grad_norm": 0.08112490177154541, | |
| "learning_rate": 2.1792857142857143e-05, | |
| "loss": 0.0326, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 4.525714285714286, | |
| "grad_norm": 0.11766567826271057, | |
| "learning_rate": 2.1721428571428574e-05, | |
| "loss": 0.0314, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.537142857142857, | |
| "grad_norm": 0.0992494523525238, | |
| "learning_rate": 2.165e-05, | |
| "loss": 0.031, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 4.548571428571429, | |
| "grad_norm": 0.2512616813182831, | |
| "learning_rate": 2.157857142857143e-05, | |
| "loss": 0.0321, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 4.5600000000000005, | |
| "grad_norm": 0.13177117705345154, | |
| "learning_rate": 2.150714285714286e-05, | |
| "loss": 0.0312, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 4.571428571428571, | |
| "grad_norm": 0.14732351899147034, | |
| "learning_rate": 2.1435714285714286e-05, | |
| "loss": 0.0332, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.582857142857143, | |
| "grad_norm": 0.08171796053647995, | |
| "learning_rate": 2.1364285714285714e-05, | |
| "loss": 0.0325, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 4.594285714285714, | |
| "grad_norm": 0.08084321767091751, | |
| "learning_rate": 2.1292857142857144e-05, | |
| "loss": 0.0316, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 4.605714285714286, | |
| "grad_norm": 0.11409573256969452, | |
| "learning_rate": 2.1221428571428572e-05, | |
| "loss": 0.0329, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 4.617142857142857, | |
| "grad_norm": 0.07737822085618973, | |
| "learning_rate": 2.115e-05, | |
| "loss": 0.0308, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 4.628571428571428, | |
| "grad_norm": 0.08457338809967041, | |
| "learning_rate": 2.107857142857143e-05, | |
| "loss": 0.0319, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 0.11584480851888657, | |
| "learning_rate": 2.1007142857142857e-05, | |
| "loss": 0.0321, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 4.651428571428571, | |
| "grad_norm": 0.0774964839220047, | |
| "learning_rate": 2.0935714285714288e-05, | |
| "loss": 0.0309, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 4.662857142857143, | |
| "grad_norm": 0.1350235491991043, | |
| "learning_rate": 2.0864285714285715e-05, | |
| "loss": 0.031, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 4.674285714285714, | |
| "grad_norm": 0.07618366181850433, | |
| "learning_rate": 2.0792857142857146e-05, | |
| "loss": 0.031, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 4.685714285714286, | |
| "grad_norm": 0.06407870352268219, | |
| "learning_rate": 2.0721428571428573e-05, | |
| "loss": 0.034, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 4.6971428571428575, | |
| "grad_norm": 0.09820050001144409, | |
| "learning_rate": 2.065e-05, | |
| "loss": 0.0325, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 4.708571428571428, | |
| "grad_norm": 0.07984957098960876, | |
| "learning_rate": 2.057857142857143e-05, | |
| "loss": 0.0312, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "grad_norm": 0.06861667335033417, | |
| "learning_rate": 2.050714285714286e-05, | |
| "loss": 0.0313, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 4.731428571428571, | |
| "grad_norm": 0.07516956329345703, | |
| "learning_rate": 2.0435714285714286e-05, | |
| "loss": 0.0336, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 4.742857142857143, | |
| "grad_norm": 0.06301568448543549, | |
| "learning_rate": 2.0364285714285717e-05, | |
| "loss": 0.0317, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 4.7542857142857144, | |
| "grad_norm": 0.2740742266178131, | |
| "learning_rate": 2.0292857142857144e-05, | |
| "loss": 0.0323, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 4.765714285714286, | |
| "grad_norm": 0.15055391192436218, | |
| "learning_rate": 2.022142857142857e-05, | |
| "loss": 0.0316, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 4.777142857142858, | |
| "grad_norm": 0.06952224671840668, | |
| "learning_rate": 2.0150000000000002e-05, | |
| "loss": 0.0311, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 4.788571428571428, | |
| "grad_norm": 0.06311614066362381, | |
| "learning_rate": 2.007857142857143e-05, | |
| "loss": 0.0306, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "grad_norm": 0.05969397723674774, | |
| "learning_rate": 2.0007142857142857e-05, | |
| "loss": 0.0303, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 4.811428571428571, | |
| "grad_norm": 0.1038677766919136, | |
| "learning_rate": 1.9935714285714284e-05, | |
| "loss": 0.032, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 4.822857142857143, | |
| "grad_norm": 0.06804952770471573, | |
| "learning_rate": 1.9864285714285715e-05, | |
| "loss": 0.0304, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 4.8342857142857145, | |
| "grad_norm": 0.0827961415052414, | |
| "learning_rate": 1.9792857142857142e-05, | |
| "loss": 0.031, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 4.845714285714286, | |
| "grad_norm": 0.07299306988716125, | |
| "learning_rate": 1.9721428571428573e-05, | |
| "loss": 0.0308, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 4.857142857142857, | |
| "grad_norm": 0.16776058077812195, | |
| "learning_rate": 1.9650000000000003e-05, | |
| "loss": 0.0313, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 4.868571428571428, | |
| "grad_norm": 0.13184590637683868, | |
| "learning_rate": 1.957857142857143e-05, | |
| "loss": 0.0329, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "grad_norm": 0.08228486776351929, | |
| "learning_rate": 1.9507142857142858e-05, | |
| "loss": 0.0304, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 4.8914285714285715, | |
| "grad_norm": 0.12768115103244781, | |
| "learning_rate": 1.943571428571429e-05, | |
| "loss": 0.0308, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 4.902857142857143, | |
| "grad_norm": 0.09318844974040985, | |
| "learning_rate": 1.9364285714285716e-05, | |
| "loss": 0.031, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 4.914285714285715, | |
| "grad_norm": 0.07075022161006927, | |
| "learning_rate": 1.9292857142857143e-05, | |
| "loss": 0.0307, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 4.925714285714285, | |
| "grad_norm": 0.0722036212682724, | |
| "learning_rate": 1.9221428571428574e-05, | |
| "loss": 0.0316, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 4.937142857142857, | |
| "grad_norm": 0.0727299302816391, | |
| "learning_rate": 1.915e-05, | |
| "loss": 0.0309, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 4.948571428571428, | |
| "grad_norm": 0.061714913696050644, | |
| "learning_rate": 1.907857142857143e-05, | |
| "loss": 0.0311, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "grad_norm": 0.0752352699637413, | |
| "learning_rate": 1.9007142857142856e-05, | |
| "loss": 0.0303, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 4.9714285714285715, | |
| "grad_norm": 0.07696497440338135, | |
| "learning_rate": 1.8935714285714287e-05, | |
| "loss": 0.0314, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 4.982857142857143, | |
| "grad_norm": 0.07310967147350311, | |
| "learning_rate": 1.8864285714285714e-05, | |
| "loss": 0.0318, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 4.994285714285715, | |
| "grad_norm": 0.0728888288140297, | |
| "learning_rate": 1.879285714285714e-05, | |
| "loss": 0.0305, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "eval_bleu": 0.07, | |
| "eval_exact_match": 0.0, | |
| "eval_loss": 0.028437696397304535, | |
| "eval_runtime": 342.7815, | |
| "eval_samples_per_second": 17.504, | |
| "eval_steps_per_second": 1.094, | |
| "step": 4375 | |
| }, | |
| { | |
| "epoch": 5.005714285714285, | |
| "grad_norm": 0.09679137915372849, | |
| "learning_rate": 1.8721428571428572e-05, | |
| "loss": 0.0325, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 5.017142857142857, | |
| "grad_norm": 0.13325752317905426, | |
| "learning_rate": 1.865e-05, | |
| "loss": 0.0311, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 5.0285714285714285, | |
| "grad_norm": 0.0718117207288742, | |
| "learning_rate": 1.857857142857143e-05, | |
| "loss": 0.0314, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "grad_norm": 0.07288797199726105, | |
| "learning_rate": 1.8507142857142857e-05, | |
| "loss": 0.0319, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 5.051428571428572, | |
| "grad_norm": 0.0838438868522644, | |
| "learning_rate": 1.8435714285714288e-05, | |
| "loss": 0.0317, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 5.062857142857143, | |
| "grad_norm": 0.11004387587308884, | |
| "learning_rate": 1.8364285714285715e-05, | |
| "loss": 0.0318, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 5.074285714285715, | |
| "grad_norm": 0.06953708082437515, | |
| "learning_rate": 1.8292857142857146e-05, | |
| "loss": 0.0315, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 5.085714285714285, | |
| "grad_norm": 0.08132421225309372, | |
| "learning_rate": 1.8221428571428573e-05, | |
| "loss": 0.033, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 5.097142857142857, | |
| "grad_norm": 0.09449005872011185, | |
| "learning_rate": 1.815e-05, | |
| "loss": 0.0309, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 5.1085714285714285, | |
| "grad_norm": 0.1384226381778717, | |
| "learning_rate": 1.807857142857143e-05, | |
| "loss": 0.0313, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "grad_norm": 0.11961528658866882, | |
| "learning_rate": 1.800714285714286e-05, | |
| "loss": 0.0323, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 5.131428571428572, | |
| "grad_norm": 0.07924534380435944, | |
| "learning_rate": 1.7935714285714286e-05, | |
| "loss": 0.0309, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 5.142857142857143, | |
| "grad_norm": 0.09376318007707596, | |
| "learning_rate": 1.7864285714285713e-05, | |
| "loss": 0.0305, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 5.154285714285714, | |
| "grad_norm": 0.13669812679290771, | |
| "learning_rate": 1.7792857142857144e-05, | |
| "loss": 0.0301, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 5.1657142857142855, | |
| "grad_norm": 0.11170660704374313, | |
| "learning_rate": 1.772142857142857e-05, | |
| "loss": 0.031, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 5.177142857142857, | |
| "grad_norm": 0.08319266140460968, | |
| "learning_rate": 1.765e-05, | |
| "loss": 0.0304, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 5.188571428571429, | |
| "grad_norm": 0.08556800335645676, | |
| "learning_rate": 1.757857142857143e-05, | |
| "loss": 0.0308, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "grad_norm": 0.10389435291290283, | |
| "learning_rate": 1.7507142857142857e-05, | |
| "loss": 0.0306, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 5.211428571428572, | |
| "grad_norm": 0.07358275353908539, | |
| "learning_rate": 1.7435714285714287e-05, | |
| "loss": 0.0319, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 5.222857142857142, | |
| "grad_norm": 0.07893013209104538, | |
| "learning_rate": 1.7364285714285715e-05, | |
| "loss": 0.0313, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 5.234285714285714, | |
| "grad_norm": 0.0908748209476471, | |
| "learning_rate": 1.7292857142857145e-05, | |
| "loss": 0.0284, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 5.2457142857142856, | |
| "grad_norm": 0.07306382060050964, | |
| "learning_rate": 1.7221428571428573e-05, | |
| "loss": 0.0309, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 5.257142857142857, | |
| "grad_norm": 0.06851589679718018, | |
| "learning_rate": 1.7150000000000004e-05, | |
| "loss": 0.0301, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 5.268571428571429, | |
| "grad_norm": 0.10181226581335068, | |
| "learning_rate": 1.707857142857143e-05, | |
| "loss": 0.032, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "grad_norm": 0.08728600293397903, | |
| "learning_rate": 1.7007142857142858e-05, | |
| "loss": 0.0324, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 5.291428571428572, | |
| "grad_norm": 0.09001002460718155, | |
| "learning_rate": 1.6935714285714285e-05, | |
| "loss": 0.0304, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 5.3028571428571425, | |
| "grad_norm": 0.09468323737382889, | |
| "learning_rate": 1.6864285714285716e-05, | |
| "loss": 0.0303, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 5.314285714285714, | |
| "grad_norm": 0.07029464840888977, | |
| "learning_rate": 1.6792857142857143e-05, | |
| "loss": 0.0313, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 5.325714285714286, | |
| "grad_norm": 0.0702148973941803, | |
| "learning_rate": 1.672142857142857e-05, | |
| "loss": 0.0312, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 5.337142857142857, | |
| "grad_norm": 0.06833337247371674, | |
| "learning_rate": 1.665e-05, | |
| "loss": 0.0319, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 5.348571428571429, | |
| "grad_norm": 0.10571616888046265, | |
| "learning_rate": 1.657857142857143e-05, | |
| "loss": 0.0306, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "grad_norm": 0.07842124998569489, | |
| "learning_rate": 1.6507142857142856e-05, | |
| "loss": 0.0306, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 5.371428571428572, | |
| "grad_norm": 0.07739285379648209, | |
| "learning_rate": 1.6435714285714287e-05, | |
| "loss": 0.0304, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 5.382857142857143, | |
| "grad_norm": 0.07978440076112747, | |
| "learning_rate": 1.6364285714285714e-05, | |
| "loss": 0.0307, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 5.394285714285714, | |
| "grad_norm": 0.07032512873411179, | |
| "learning_rate": 1.629285714285714e-05, | |
| "loss": 0.0313, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 5.405714285714286, | |
| "grad_norm": 0.06472887843847275, | |
| "learning_rate": 1.6221428571428572e-05, | |
| "loss": 0.0295, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 5.417142857142857, | |
| "grad_norm": 0.08794354647397995, | |
| "learning_rate": 1.6150000000000003e-05, | |
| "loss": 0.0305, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 5.428571428571429, | |
| "grad_norm": 0.08242463320493698, | |
| "learning_rate": 1.607857142857143e-05, | |
| "loss": 0.0307, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "grad_norm": 0.0753536969423294, | |
| "learning_rate": 1.6007142857142858e-05, | |
| "loss": 0.0301, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 5.451428571428571, | |
| "grad_norm": 0.06398053467273712, | |
| "learning_rate": 1.5935714285714288e-05, | |
| "loss": 0.0299, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 5.462857142857143, | |
| "grad_norm": 0.07519324123859406, | |
| "learning_rate": 1.5864285714285716e-05, | |
| "loss": 0.0307, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 5.474285714285714, | |
| "grad_norm": 0.0712146982550621, | |
| "learning_rate": 1.5792857142857143e-05, | |
| "loss": 0.0316, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 5.485714285714286, | |
| "grad_norm": 0.06212683022022247, | |
| "learning_rate": 1.5721428571428574e-05, | |
| "loss": 0.0298, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 5.497142857142857, | |
| "grad_norm": 0.060816798359155655, | |
| "learning_rate": 1.565e-05, | |
| "loss": 0.0316, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 5.508571428571429, | |
| "grad_norm": 0.06750506162643433, | |
| "learning_rate": 1.5578571428571428e-05, | |
| "loss": 0.0307, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "grad_norm": 0.09742510318756104, | |
| "learning_rate": 1.550714285714286e-05, | |
| "loss": 0.0326, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 5.531428571428571, | |
| "grad_norm": 0.061222221702337265, | |
| "learning_rate": 1.5435714285714286e-05, | |
| "loss": 0.0289, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 5.542857142857143, | |
| "grad_norm": 0.08668530732393265, | |
| "learning_rate": 1.5364285714285714e-05, | |
| "loss": 0.0315, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 5.554285714285714, | |
| "grad_norm": 0.0962333232164383, | |
| "learning_rate": 1.5292857142857144e-05, | |
| "loss": 0.0316, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 5.565714285714286, | |
| "grad_norm": 0.06368666142225266, | |
| "learning_rate": 1.5221428571428573e-05, | |
| "loss": 0.031, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 5.577142857142857, | |
| "grad_norm": 0.0649942010641098, | |
| "learning_rate": 1.515e-05, | |
| "loss": 0.03, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 5.588571428571429, | |
| "grad_norm": 0.08651057630777359, | |
| "learning_rate": 1.5078571428571428e-05, | |
| "loss": 0.0301, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "grad_norm": 0.06237189471721649, | |
| "learning_rate": 1.5007142857142859e-05, | |
| "loss": 0.0305, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 5.611428571428571, | |
| "grad_norm": 0.06680475175380707, | |
| "learning_rate": 1.4935714285714286e-05, | |
| "loss": 0.0311, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 5.622857142857143, | |
| "grad_norm": 0.07316766679286957, | |
| "learning_rate": 1.4864285714285713e-05, | |
| "loss": 0.0308, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 5.634285714285714, | |
| "grad_norm": 0.09247075021266937, | |
| "learning_rate": 1.4792857142857144e-05, | |
| "loss": 0.0304, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 5.645714285714286, | |
| "grad_norm": 0.2784198820590973, | |
| "learning_rate": 1.4721428571428571e-05, | |
| "loss": 0.0294, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 5.6571428571428575, | |
| "grad_norm": 0.09237693250179291, | |
| "learning_rate": 1.465e-05, | |
| "loss": 0.0313, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 5.668571428571429, | |
| "grad_norm": 0.07822994887828827, | |
| "learning_rate": 1.4578571428571431e-05, | |
| "loss": 0.0301, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "grad_norm": 0.10892365127801895, | |
| "learning_rate": 1.4507142857142858e-05, | |
| "loss": 0.0314, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 5.691428571428571, | |
| "grad_norm": 0.07287736982107162, | |
| "learning_rate": 1.4435714285714286e-05, | |
| "loss": 0.0309, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 5.702857142857143, | |
| "grad_norm": 0.0734676644206047, | |
| "learning_rate": 1.4364285714285716e-05, | |
| "loss": 0.0319, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 5.714285714285714, | |
| "grad_norm": 0.06947411596775055, | |
| "learning_rate": 1.4292857142857144e-05, | |
| "loss": 0.0326, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 5.725714285714286, | |
| "grad_norm": 0.08650074899196625, | |
| "learning_rate": 1.4221428571428571e-05, | |
| "loss": 0.0318, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 5.737142857142857, | |
| "grad_norm": 0.059798020869493484, | |
| "learning_rate": 1.415e-05, | |
| "loss": 0.0315, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 5.748571428571428, | |
| "grad_norm": 0.1155044287443161, | |
| "learning_rate": 1.4078571428571429e-05, | |
| "loss": 0.0305, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "grad_norm": 0.06892979145050049, | |
| "learning_rate": 1.4007142857142858e-05, | |
| "loss": 0.0314, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 5.771428571428571, | |
| "grad_norm": 0.08073502033948898, | |
| "learning_rate": 1.3935714285714285e-05, | |
| "loss": 0.0304, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 5.782857142857143, | |
| "grad_norm": 0.07314231246709824, | |
| "learning_rate": 1.3864285714285716e-05, | |
| "loss": 0.03, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 5.7942857142857145, | |
| "grad_norm": 0.08082398027181625, | |
| "learning_rate": 1.3792857142857143e-05, | |
| "loss": 0.0307, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 5.805714285714286, | |
| "grad_norm": 0.08921355754137039, | |
| "learning_rate": 1.372142857142857e-05, | |
| "loss": 0.0315, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 5.817142857142857, | |
| "grad_norm": 0.05853136256337166, | |
| "learning_rate": 1.3650000000000001e-05, | |
| "loss": 0.0297, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 5.828571428571428, | |
| "grad_norm": 0.09137912839651108, | |
| "learning_rate": 1.3578571428571429e-05, | |
| "loss": 0.0304, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "grad_norm": 0.0859987661242485, | |
| "learning_rate": 1.3507142857142858e-05, | |
| "loss": 0.0313, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 5.851428571428571, | |
| "grad_norm": 0.06775084137916565, | |
| "learning_rate": 1.3435714285714287e-05, | |
| "loss": 0.029, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 5.862857142857143, | |
| "grad_norm": 0.1020933985710144, | |
| "learning_rate": 1.3364285714285716e-05, | |
| "loss": 0.0318, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 5.8742857142857146, | |
| "grad_norm": 0.07362332940101624, | |
| "learning_rate": 1.3292857142857143e-05, | |
| "loss": 0.0298, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 5.885714285714286, | |
| "grad_norm": 0.07384783774614334, | |
| "learning_rate": 1.322142857142857e-05, | |
| "loss": 0.0325, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 5.897142857142857, | |
| "grad_norm": 0.07200124859809875, | |
| "learning_rate": 1.3150000000000001e-05, | |
| "loss": 0.0296, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 5.908571428571428, | |
| "grad_norm": 0.09490924328565598, | |
| "learning_rate": 1.3078571428571428e-05, | |
| "loss": 0.0321, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "grad_norm": 0.07857917249202728, | |
| "learning_rate": 1.3007142857142856e-05, | |
| "loss": 0.031, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 5.9314285714285715, | |
| "grad_norm": 0.08538127690553665, | |
| "learning_rate": 1.2935714285714286e-05, | |
| "loss": 0.0302, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 5.942857142857143, | |
| "grad_norm": 0.13723719120025635, | |
| "learning_rate": 1.2864285714285716e-05, | |
| "loss": 0.0301, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 5.954285714285715, | |
| "grad_norm": 0.06784668564796448, | |
| "learning_rate": 1.2792857142857143e-05, | |
| "loss": 0.0326, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 5.965714285714286, | |
| "grad_norm": 0.07009400427341461, | |
| "learning_rate": 1.2721428571428574e-05, | |
| "loss": 0.0307, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 5.977142857142857, | |
| "grad_norm": 0.06899885833263397, | |
| "learning_rate": 1.2650000000000001e-05, | |
| "loss": 0.0321, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 5.988571428571428, | |
| "grad_norm": 0.06912403553724289, | |
| "learning_rate": 1.2578571428571428e-05, | |
| "loss": 0.031, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.0813736692070961, | |
| "learning_rate": 1.2507142857142859e-05, | |
| "loss": 0.0317, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_bleu": 0.07, | |
| "eval_exact_match": 0.0, | |
| "eval_loss": 0.028217371553182602, | |
| "eval_runtime": 343.1777, | |
| "eval_samples_per_second": 17.484, | |
| "eval_steps_per_second": 1.093, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 6.011428571428572, | |
| "grad_norm": 0.06998402625322342, | |
| "learning_rate": 1.2435714285714286e-05, | |
| "loss": 0.0307, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 6.022857142857143, | |
| "grad_norm": 0.05968615785241127, | |
| "learning_rate": 1.2364285714285714e-05, | |
| "loss": 0.0318, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 6.034285714285715, | |
| "grad_norm": 0.09144943207502365, | |
| "learning_rate": 1.2292857142857144e-05, | |
| "loss": 0.0319, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 6.045714285714285, | |
| "grad_norm": 0.07859035581350327, | |
| "learning_rate": 1.2221428571428573e-05, | |
| "loss": 0.0304, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 6.057142857142857, | |
| "grad_norm": 0.06295861303806305, | |
| "learning_rate": 1.215e-05, | |
| "loss": 0.0312, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 6.0685714285714285, | |
| "grad_norm": 0.12032752484083176, | |
| "learning_rate": 1.207857142857143e-05, | |
| "loss": 0.0308, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "grad_norm": 0.0782964825630188, | |
| "learning_rate": 1.2007142857142857e-05, | |
| "loss": 0.0317, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 6.091428571428572, | |
| "grad_norm": 0.05909128487110138, | |
| "learning_rate": 1.1935714285714286e-05, | |
| "loss": 0.0289, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 6.102857142857143, | |
| "grad_norm": 0.0750165656208992, | |
| "learning_rate": 1.1864285714285715e-05, | |
| "loss": 0.0307, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 6.114285714285714, | |
| "grad_norm": 0.07260541617870331, | |
| "learning_rate": 1.1792857142857142e-05, | |
| "loss": 0.0307, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 6.1257142857142854, | |
| "grad_norm": 0.07592778652906418, | |
| "learning_rate": 1.1721428571428571e-05, | |
| "loss": 0.0309, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 6.137142857142857, | |
| "grad_norm": 0.08206040412187576, | |
| "learning_rate": 1.1650000000000002e-05, | |
| "loss": 0.0302, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 6.148571428571429, | |
| "grad_norm": 0.08673234283924103, | |
| "learning_rate": 1.157857142857143e-05, | |
| "loss": 0.0297, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "grad_norm": 0.06839073449373245, | |
| "learning_rate": 1.1507142857142858e-05, | |
| "loss": 0.0304, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 6.171428571428572, | |
| "grad_norm": 0.0769868940114975, | |
| "learning_rate": 1.1435714285714286e-05, | |
| "loss": 0.0319, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 6.182857142857143, | |
| "grad_norm": 0.07529103755950928, | |
| "learning_rate": 1.1364285714285715e-05, | |
| "loss": 0.0312, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 6.194285714285714, | |
| "grad_norm": 0.06735879182815552, | |
| "learning_rate": 1.1292857142857144e-05, | |
| "loss": 0.031, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 6.2057142857142855, | |
| "grad_norm": 0.06926565617322922, | |
| "learning_rate": 1.1221428571428571e-05, | |
| "loss": 0.031, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 6.217142857142857, | |
| "grad_norm": 0.09316926449537277, | |
| "learning_rate": 1.115e-05, | |
| "loss": 0.0319, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 6.228571428571429, | |
| "grad_norm": 0.07555294036865234, | |
| "learning_rate": 1.1078571428571429e-05, | |
| "loss": 0.0303, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "grad_norm": 0.08294390887022018, | |
| "learning_rate": 1.1007142857142858e-05, | |
| "loss": 0.0297, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 6.251428571428572, | |
| "grad_norm": 0.06981640309095383, | |
| "learning_rate": 1.0935714285714287e-05, | |
| "loss": 0.0305, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 6.2628571428571425, | |
| "grad_norm": 0.07442454993724823, | |
| "learning_rate": 1.0864285714285714e-05, | |
| "loss": 0.031, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 6.274285714285714, | |
| "grad_norm": 0.12811383605003357, | |
| "learning_rate": 1.0792857142857143e-05, | |
| "loss": 0.0299, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 6.285714285714286, | |
| "grad_norm": 0.0792556032538414, | |
| "learning_rate": 1.0721428571428572e-05, | |
| "loss": 0.0315, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 6.297142857142857, | |
| "grad_norm": 0.08356291055679321, | |
| "learning_rate": 1.065e-05, | |
| "loss": 0.0319, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 6.308571428571429, | |
| "grad_norm": 0.10948007553815842, | |
| "learning_rate": 1.0578571428571429e-05, | |
| "loss": 0.0293, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "grad_norm": 0.0714062824845314, | |
| "learning_rate": 1.0507142857142858e-05, | |
| "loss": 0.0303, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 6.331428571428571, | |
| "grad_norm": 0.058776937425136566, | |
| "learning_rate": 1.0435714285714287e-05, | |
| "loss": 0.0304, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 6.3428571428571425, | |
| "grad_norm": 0.07753952592611313, | |
| "learning_rate": 1.0364285714285716e-05, | |
| "loss": 0.0311, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 6.354285714285714, | |
| "grad_norm": 0.08094929158687592, | |
| "learning_rate": 1.0292857142857143e-05, | |
| "loss": 0.03, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 6.365714285714286, | |
| "grad_norm": 0.07799239456653595, | |
| "learning_rate": 1.0221428571428572e-05, | |
| "loss": 0.0316, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 6.377142857142857, | |
| "grad_norm": 0.07615884393453598, | |
| "learning_rate": 1.0150000000000001e-05, | |
| "loss": 0.0317, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 6.388571428571429, | |
| "grad_norm": 0.06691175699234009, | |
| "learning_rate": 1.0078571428571428e-05, | |
| "loss": 0.0312, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "grad_norm": 0.07050418853759766, | |
| "learning_rate": 1.0007142857142857e-05, | |
| "loss": 0.0314, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 6.411428571428571, | |
| "grad_norm": 0.07742037624120712, | |
| "learning_rate": 9.935714285714286e-06, | |
| "loss": 0.0302, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 6.422857142857143, | |
| "grad_norm": 0.07048898935317993, | |
| "learning_rate": 9.864285714285715e-06, | |
| "loss": 0.0305, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 6.434285714285714, | |
| "grad_norm": 0.07033011317253113, | |
| "learning_rate": 9.792857142857144e-06, | |
| "loss": 0.031, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 6.445714285714286, | |
| "grad_norm": 0.1080131009221077, | |
| "learning_rate": 9.721428571428572e-06, | |
| "loss": 0.032, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 6.457142857142857, | |
| "grad_norm": 0.08267289400100708, | |
| "learning_rate": 9.65e-06, | |
| "loss": 0.0302, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 6.468571428571429, | |
| "grad_norm": 0.058059051632881165, | |
| "learning_rate": 9.578571428571428e-06, | |
| "loss": 0.0304, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "grad_norm": 0.07850830256938934, | |
| "learning_rate": 9.507142857142857e-06, | |
| "loss": 0.0308, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 6.491428571428571, | |
| "grad_norm": 0.08184772729873657, | |
| "learning_rate": 9.435714285714286e-06, | |
| "loss": 0.0306, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 6.502857142857143, | |
| "grad_norm": 0.06574039161205292, | |
| "learning_rate": 9.364285714285715e-06, | |
| "loss": 0.0299, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 6.514285714285714, | |
| "grad_norm": 0.24083204567432404, | |
| "learning_rate": 9.292857142857144e-06, | |
| "loss": 0.0316, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 6.525714285714286, | |
| "grad_norm": 0.09220234304666519, | |
| "learning_rate": 9.221428571428573e-06, | |
| "loss": 0.0284, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 6.537142857142857, | |
| "grad_norm": 0.07189635932445526, | |
| "learning_rate": 9.15e-06, | |
| "loss": 0.0331, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 6.548571428571429, | |
| "grad_norm": 0.10544195771217346, | |
| "learning_rate": 9.07857142857143e-06, | |
| "loss": 0.0311, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 6.5600000000000005, | |
| "grad_norm": 0.07962433993816376, | |
| "learning_rate": 9.007142857142857e-06, | |
| "loss": 0.0309, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 6.571428571428571, | |
| "grad_norm": 0.07586560398340225, | |
| "learning_rate": 8.935714285714286e-06, | |
| "loss": 0.0282, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 6.582857142857143, | |
| "grad_norm": 0.07862204313278198, | |
| "learning_rate": 8.864285714285715e-06, | |
| "loss": 0.0313, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 6.594285714285714, | |
| "grad_norm": 0.05962090566754341, | |
| "learning_rate": 8.792857142857142e-06, | |
| "loss": 0.0301, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 6.605714285714286, | |
| "grad_norm": 0.06269550323486328, | |
| "learning_rate": 8.721428571428573e-06, | |
| "loss": 0.0297, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 6.617142857142857, | |
| "grad_norm": 0.11576901376247406, | |
| "learning_rate": 8.65e-06, | |
| "loss": 0.0316, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 6.628571428571428, | |
| "grad_norm": 0.06566612422466278, | |
| "learning_rate": 8.57857142857143e-06, | |
| "loss": 0.0304, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "grad_norm": 0.06394005566835403, | |
| "learning_rate": 8.507142857142858e-06, | |
| "loss": 0.0309, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 6.651428571428571, | |
| "grad_norm": 0.06856530159711838, | |
| "learning_rate": 8.435714285714286e-06, | |
| "loss": 0.0313, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 6.662857142857143, | |
| "grad_norm": 0.07678523659706116, | |
| "learning_rate": 8.364285714285715e-06, | |
| "loss": 0.0324, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 6.674285714285714, | |
| "grad_norm": 0.10768764466047287, | |
| "learning_rate": 8.292857142857144e-06, | |
| "loss": 0.03, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 6.685714285714286, | |
| "grad_norm": 0.13758371770381927, | |
| "learning_rate": 8.221428571428571e-06, | |
| "loss": 0.032, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 6.6971428571428575, | |
| "grad_norm": 0.06116107106208801, | |
| "learning_rate": 8.15e-06, | |
| "loss": 0.0323, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 6.708571428571428, | |
| "grad_norm": 0.058115601539611816, | |
| "learning_rate": 8.078571428571429e-06, | |
| "loss": 0.0308, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "grad_norm": 0.061137229204177856, | |
| "learning_rate": 8.007142857142858e-06, | |
| "loss": 0.029, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 6.731428571428571, | |
| "grad_norm": 0.13177888095378876, | |
| "learning_rate": 7.935714285714287e-06, | |
| "loss": 0.0305, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 6.742857142857143, | |
| "grad_norm": 0.06651601195335388, | |
| "learning_rate": 7.864285714285714e-06, | |
| "loss": 0.0297, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 6.7542857142857144, | |
| "grad_norm": 0.06756425648927689, | |
| "learning_rate": 7.792857142857143e-06, | |
| "loss": 0.0317, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 6.765714285714286, | |
| "grad_norm": 0.11434056609869003, | |
| "learning_rate": 7.72142857142857e-06, | |
| "loss": 0.0294, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 6.777142857142858, | |
| "grad_norm": 0.06794974952936172, | |
| "learning_rate": 7.65e-06, | |
| "loss": 0.031, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 6.788571428571428, | |
| "grad_norm": 0.06978090107440948, | |
| "learning_rate": 7.5785714285714295e-06, | |
| "loss": 0.03, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "grad_norm": 0.06427010148763657, | |
| "learning_rate": 7.507142857142857e-06, | |
| "loss": 0.031, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 6.811428571428571, | |
| "grad_norm": 0.06627924740314484, | |
| "learning_rate": 7.435714285714286e-06, | |
| "loss": 0.0295, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 6.822857142857143, | |
| "grad_norm": 0.07513666898012161, | |
| "learning_rate": 7.364285714285716e-06, | |
| "loss": 0.0309, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 6.8342857142857145, | |
| "grad_norm": 0.08213704079389572, | |
| "learning_rate": 7.292857142857143e-06, | |
| "loss": 0.0321, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 6.845714285714286, | |
| "grad_norm": 0.059965796768665314, | |
| "learning_rate": 7.221428571428572e-06, | |
| "loss": 0.0293, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 6.857142857142857, | |
| "grad_norm": 0.10298174619674683, | |
| "learning_rate": 7.15e-06, | |
| "loss": 0.0311, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 6.868571428571428, | |
| "grad_norm": 0.08990275859832764, | |
| "learning_rate": 7.078571428571429e-06, | |
| "loss": 0.0302, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "grad_norm": 0.06812401860952377, | |
| "learning_rate": 7.007142857142858e-06, | |
| "loss": 0.03, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 6.8914285714285715, | |
| "grad_norm": 0.08880835771560669, | |
| "learning_rate": 6.9357142857142855e-06, | |
| "loss": 0.0307, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 6.902857142857143, | |
| "grad_norm": 0.06701825559139252, | |
| "learning_rate": 6.8642857142857145e-06, | |
| "loss": 0.0298, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 6.914285714285715, | |
| "grad_norm": 0.11159258335828781, | |
| "learning_rate": 6.7928571428571435e-06, | |
| "loss": 0.0305, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 6.925714285714285, | |
| "grad_norm": 0.07996679842472076, | |
| "learning_rate": 6.721428571428572e-06, | |
| "loss": 0.0309, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 6.937142857142857, | |
| "grad_norm": 0.07035525143146515, | |
| "learning_rate": 6.650000000000001e-06, | |
| "loss": 0.0299, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 6.948571428571428, | |
| "grad_norm": 0.07342403382062912, | |
| "learning_rate": 6.578571428571428e-06, | |
| "loss": 0.0303, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "grad_norm": 0.05549318343400955, | |
| "learning_rate": 6.507142857142858e-06, | |
| "loss": 0.0295, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 6.9714285714285715, | |
| "grad_norm": 0.054844338446855545, | |
| "learning_rate": 6.435714285714287e-06, | |
| "loss": 0.0303, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 6.982857142857143, | |
| "grad_norm": 0.07505437731742859, | |
| "learning_rate": 6.364285714285714e-06, | |
| "loss": 0.0321, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 6.994285714285715, | |
| "grad_norm": 0.07832371443510056, | |
| "learning_rate": 6.292857142857143e-06, | |
| "loss": 0.031, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_bleu": 0.07, | |
| "eval_exact_match": 0.0, | |
| "eval_loss": 0.028141871094703674, | |
| "eval_runtime": 343.5619, | |
| "eval_samples_per_second": 17.464, | |
| "eval_steps_per_second": 1.092, | |
| "step": 6125 | |
| }, | |
| { | |
| "epoch": 7.005714285714285, | |
| "grad_norm": 0.06968718022108078, | |
| "learning_rate": 6.221428571428571e-06, | |
| "loss": 0.0298, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 7.017142857142857, | |
| "grad_norm": 0.06143512949347496, | |
| "learning_rate": 6.15e-06, | |
| "loss": 0.0296, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 7.0285714285714285, | |
| "grad_norm": 0.06617894023656845, | |
| "learning_rate": 6.0785714285714286e-06, | |
| "loss": 0.0304, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "grad_norm": 0.08548376709222794, | |
| "learning_rate": 6.007142857142858e-06, | |
| "loss": 0.0316, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 7.051428571428572, | |
| "grad_norm": 0.10409008711576462, | |
| "learning_rate": 5.935714285714286e-06, | |
| "loss": 0.0323, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 7.062857142857143, | |
| "grad_norm": 0.06821285933256149, | |
| "learning_rate": 5.864285714285715e-06, | |
| "loss": 0.0308, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 7.074285714285715, | |
| "grad_norm": 0.06576462835073471, | |
| "learning_rate": 5.792857142857143e-06, | |
| "loss": 0.0286, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 7.085714285714285, | |
| "grad_norm": 0.08164945989847183, | |
| "learning_rate": 5.721428571428572e-06, | |
| "loss": 0.0303, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 7.097142857142857, | |
| "grad_norm": 0.060051556676626205, | |
| "learning_rate": 5.65e-06, | |
| "loss": 0.0296, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 7.1085714285714285, | |
| "grad_norm": 0.05957673490047455, | |
| "learning_rate": 5.578571428571429e-06, | |
| "loss": 0.0311, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "grad_norm": 0.055859100073575974, | |
| "learning_rate": 5.507142857142857e-06, | |
| "loss": 0.0293, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 7.131428571428572, | |
| "grad_norm": 0.07006938755512238, | |
| "learning_rate": 5.4357142857142855e-06, | |
| "loss": 0.0308, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 7.142857142857143, | |
| "grad_norm": 0.055445924401283264, | |
| "learning_rate": 5.3642857142857145e-06, | |
| "loss": 0.03, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 7.154285714285714, | |
| "grad_norm": 0.07507022470235825, | |
| "learning_rate": 5.2928571428571435e-06, | |
| "loss": 0.0319, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 7.1657142857142855, | |
| "grad_norm": 0.05871541053056717, | |
| "learning_rate": 5.221428571428572e-06, | |
| "loss": 0.0299, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 7.177142857142857, | |
| "grad_norm": 0.05965025722980499, | |
| "learning_rate": 5.15e-06, | |
| "loss": 0.0299, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 7.188571428571429, | |
| "grad_norm": 0.07607734203338623, | |
| "learning_rate": 5.078571428571429e-06, | |
| "loss": 0.0304, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "grad_norm": 0.0674389973282814, | |
| "learning_rate": 5.007142857142858e-06, | |
| "loss": 0.0302, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 7.211428571428572, | |
| "grad_norm": 0.14311166107654572, | |
| "learning_rate": 4.935714285714286e-06, | |
| "loss": 0.0317, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 7.222857142857142, | |
| "grad_norm": 0.06374186277389526, | |
| "learning_rate": 4.864285714285714e-06, | |
| "loss": 0.0291, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 7.234285714285714, | |
| "grad_norm": 0.08698999881744385, | |
| "learning_rate": 4.792857142857143e-06, | |
| "loss": 0.0291, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 7.2457142857142856, | |
| "grad_norm": 0.06159118562936783, | |
| "learning_rate": 4.721428571428572e-06, | |
| "loss": 0.0302, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 7.257142857142857, | |
| "grad_norm": 0.07281281054019928, | |
| "learning_rate": 4.65e-06, | |
| "loss": 0.0302, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 7.268571428571429, | |
| "grad_norm": 0.09171772003173828, | |
| "learning_rate": 4.5785714285714285e-06, | |
| "loss": 0.0298, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "grad_norm": 0.07112763822078705, | |
| "learning_rate": 4.5071428571428576e-06, | |
| "loss": 0.0289, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 7.291428571428572, | |
| "grad_norm": 0.06696333736181259, | |
| "learning_rate": 4.435714285714286e-06, | |
| "loss": 0.0301, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 7.3028571428571425, | |
| "grad_norm": 0.0801989883184433, | |
| "learning_rate": 4.364285714285715e-06, | |
| "loss": 0.0313, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 7.314285714285714, | |
| "grad_norm": 0.06183260679244995, | |
| "learning_rate": 4.292857142857143e-06, | |
| "loss": 0.0305, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 7.325714285714286, | |
| "grad_norm": 0.06928607076406479, | |
| "learning_rate": 4.221428571428571e-06, | |
| "loss": 0.0307, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 7.337142857142857, | |
| "grad_norm": 0.06948118656873703, | |
| "learning_rate": 4.15e-06, | |
| "loss": 0.0307, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 7.348571428571429, | |
| "grad_norm": 0.06645726412534714, | |
| "learning_rate": 4.078571428571429e-06, | |
| "loss": 0.0303, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "grad_norm": 0.0641820877790451, | |
| "learning_rate": 4.007142857142857e-06, | |
| "loss": 0.0291, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 7.371428571428572, | |
| "grad_norm": 0.07766878604888916, | |
| "learning_rate": 3.935714285714285e-06, | |
| "loss": 0.0308, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 7.382857142857143, | |
| "grad_norm": 0.07369673252105713, | |
| "learning_rate": 3.8642857142857144e-06, | |
| "loss": 0.0308, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 7.394285714285714, | |
| "grad_norm": 0.0729992687702179, | |
| "learning_rate": 3.792857142857143e-06, | |
| "loss": 0.0292, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 7.405714285714286, | |
| "grad_norm": 0.07691550254821777, | |
| "learning_rate": 3.7214285714285716e-06, | |
| "loss": 0.0294, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 7.417142857142857, | |
| "grad_norm": 0.07256300747394562, | |
| "learning_rate": 3.6499999999999998e-06, | |
| "loss": 0.0298, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 7.428571428571429, | |
| "grad_norm": 0.12671434879302979, | |
| "learning_rate": 3.5785714285714292e-06, | |
| "loss": 0.0299, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "grad_norm": 0.09428369998931885, | |
| "learning_rate": 3.5071428571428574e-06, | |
| "loss": 0.028, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 7.451428571428571, | |
| "grad_norm": 0.16297385096549988, | |
| "learning_rate": 3.435714285714286e-06, | |
| "loss": 0.0305, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 7.462857142857143, | |
| "grad_norm": 0.06254955381155014, | |
| "learning_rate": 3.364285714285714e-06, | |
| "loss": 0.0307, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 7.474285714285714, | |
| "grad_norm": 0.13530980050563812, | |
| "learning_rate": 3.2928571428571427e-06, | |
| "loss": 0.0309, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 7.485714285714286, | |
| "grad_norm": 0.06885895878076553, | |
| "learning_rate": 3.2214285714285717e-06, | |
| "loss": 0.0292, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 7.497142857142857, | |
| "grad_norm": 0.06638657301664352, | |
| "learning_rate": 3.1500000000000003e-06, | |
| "loss": 0.03, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 7.508571428571429, | |
| "grad_norm": 0.13830330967903137, | |
| "learning_rate": 3.0785714285714285e-06, | |
| "loss": 0.0322, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "grad_norm": 0.09429346024990082, | |
| "learning_rate": 3.0071428571428575e-06, | |
| "loss": 0.0304, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 7.531428571428571, | |
| "grad_norm": 0.0697278156876564, | |
| "learning_rate": 2.9357142857142857e-06, | |
| "loss": 0.0308, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 7.542857142857143, | |
| "grad_norm": 0.06435400992631912, | |
| "learning_rate": 2.8642857142857147e-06, | |
| "loss": 0.0301, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 7.554285714285714, | |
| "grad_norm": 0.08088187128305435, | |
| "learning_rate": 2.792857142857143e-06, | |
| "loss": 0.031, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 7.565714285714286, | |
| "grad_norm": 0.06981631368398666, | |
| "learning_rate": 2.7214285714285714e-06, | |
| "loss": 0.0301, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 7.577142857142857, | |
| "grad_norm": 0.10085096210241318, | |
| "learning_rate": 2.65e-06, | |
| "loss": 0.0311, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 7.588571428571429, | |
| "grad_norm": 0.15587645769119263, | |
| "learning_rate": 2.5785714285714286e-06, | |
| "loss": 0.0295, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "grad_norm": 0.21229258179664612, | |
| "learning_rate": 2.5071428571428572e-06, | |
| "loss": 0.032, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 7.611428571428571, | |
| "grad_norm": 0.0634523332118988, | |
| "learning_rate": 2.435714285714286e-06, | |
| "loss": 0.0297, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 7.622857142857143, | |
| "grad_norm": 0.08393410593271255, | |
| "learning_rate": 2.3642857142857144e-06, | |
| "loss": 0.0304, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 7.634285714285714, | |
| "grad_norm": 0.07760918885469437, | |
| "learning_rate": 2.292857142857143e-06, | |
| "loss": 0.0298, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 7.645714285714286, | |
| "grad_norm": 0.08117687702178955, | |
| "learning_rate": 2.2214285714285716e-06, | |
| "loss": 0.031, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 7.6571428571428575, | |
| "grad_norm": 0.0747959315776825, | |
| "learning_rate": 2.1499999999999997e-06, | |
| "loss": 0.0288, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 7.668571428571429, | |
| "grad_norm": 0.15194258093833923, | |
| "learning_rate": 2.0785714285714288e-06, | |
| "loss": 0.0326, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "grad_norm": 0.0686679482460022, | |
| "learning_rate": 2.007142857142857e-06, | |
| "loss": 0.0301, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 7.691428571428571, | |
| "grad_norm": 0.07142027467489243, | |
| "learning_rate": 1.935714285714286e-06, | |
| "loss": 0.031, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 7.702857142857143, | |
| "grad_norm": 0.07775787264108658, | |
| "learning_rate": 1.8642857142857143e-06, | |
| "loss": 0.0291, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 7.714285714285714, | |
| "grad_norm": 0.07410398125648499, | |
| "learning_rate": 1.792857142857143e-06, | |
| "loss": 0.0308, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 7.725714285714286, | |
| "grad_norm": 0.07670729607343674, | |
| "learning_rate": 1.7214285714285715e-06, | |
| "loss": 0.0304, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 7.737142857142857, | |
| "grad_norm": 0.07260814309120178, | |
| "learning_rate": 1.65e-06, | |
| "loss": 0.0302, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 7.748571428571428, | |
| "grad_norm": 0.06961925327777863, | |
| "learning_rate": 1.5785714285714285e-06, | |
| "loss": 0.03, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "grad_norm": 0.09042750298976898, | |
| "learning_rate": 1.507142857142857e-06, | |
| "loss": 0.0304, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 7.771428571428571, | |
| "grad_norm": 0.07849057763814926, | |
| "learning_rate": 1.4357142857142856e-06, | |
| "loss": 0.0313, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 7.782857142857143, | |
| "grad_norm": 0.0724598690867424, | |
| "learning_rate": 1.3642857142857142e-06, | |
| "loss": 0.0296, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 7.7942857142857145, | |
| "grad_norm": 0.0791030079126358, | |
| "learning_rate": 1.2928571428571428e-06, | |
| "loss": 0.0325, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 7.805714285714286, | |
| "grad_norm": 0.06429225951433182, | |
| "learning_rate": 1.2214285714285714e-06, | |
| "loss": 0.0299, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 7.817142857142857, | |
| "grad_norm": 0.06754273921251297, | |
| "learning_rate": 1.15e-06, | |
| "loss": 0.0295, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 7.828571428571428, | |
| "grad_norm": 0.15404383838176727, | |
| "learning_rate": 1.0785714285714286e-06, | |
| "loss": 0.0307, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "grad_norm": 0.07046996057033539, | |
| "learning_rate": 1.0071428571428572e-06, | |
| "loss": 0.029, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 7.851428571428571, | |
| "grad_norm": 0.09521915763616562, | |
| "learning_rate": 9.357142857142858e-07, | |
| "loss": 0.0295, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 7.862857142857143, | |
| "grad_norm": 0.08006428182125092, | |
| "learning_rate": 8.642857142857144e-07, | |
| "loss": 0.0282, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 7.8742857142857146, | |
| "grad_norm": 0.08232620358467102, | |
| "learning_rate": 7.92857142857143e-07, | |
| "loss": 0.0304, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 7.885714285714286, | |
| "grad_norm": 0.0687454491853714, | |
| "learning_rate": 7.214285714285714e-07, | |
| "loss": 0.0301, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 7.897142857142857, | |
| "grad_norm": 0.11050191521644592, | |
| "learning_rate": 6.5e-07, | |
| "loss": 0.0304, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 7.908571428571428, | |
| "grad_norm": 0.06012315675616264, | |
| "learning_rate": 5.785714285714286e-07, | |
| "loss": 0.0309, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "grad_norm": 0.07466232031583786, | |
| "learning_rate": 5.071428571428572e-07, | |
| "loss": 0.0312, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 7.9314285714285715, | |
| "grad_norm": 0.07779434323310852, | |
| "learning_rate": 4.3571428571428574e-07, | |
| "loss": 0.0315, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 7.942857142857143, | |
| "grad_norm": 0.05634528398513794, | |
| "learning_rate": 3.6428571428571433e-07, | |
| "loss": 0.0296, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 7.954285714285715, | |
| "grad_norm": 0.0750165656208992, | |
| "learning_rate": 2.9285714285714287e-07, | |
| "loss": 0.0315, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 7.965714285714286, | |
| "grad_norm": 0.0592956617474556, | |
| "learning_rate": 2.2142857142857143e-07, | |
| "loss": 0.0306, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 7.977142857142857, | |
| "grad_norm": 0.06860347837209702, | |
| "learning_rate": 1.5000000000000002e-07, | |
| "loss": 0.0302, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 7.988571428571428, | |
| "grad_norm": 0.06017826497554779, | |
| "learning_rate": 7.857142857142858e-08, | |
| "loss": 0.0304, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 0.0971122533082962, | |
| "learning_rate": 7.142857142857144e-09, | |
| "loss": 0.0311, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_bleu": 0.07, | |
| "eval_exact_match": 0.0, | |
| "eval_loss": 0.028088832274079323, | |
| "eval_runtime": 343.24, | |
| "eval_samples_per_second": 17.48, | |
| "eval_steps_per_second": 1.093, | |
| "step": 7000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 7000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 8, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.929403150516224e+16, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |