{ "best_global_step": 4000, "best_metric": 0.7816670479676657, "best_model_checkpoint": "./SALAMA_NEW8/checkpoint-4000", "epoch": 1.597563411224286, "eval_steps": 2000, "global_step": 4000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.003994407829039345, "grad_norm": 2.2177462577819824, "learning_rate": 1.8e-07, "loss": 0.022, "step": 10 }, { "epoch": 0.00798881565807869, "grad_norm": 2.724672794342041, "learning_rate": 3.8e-07, "loss": 0.0249, "step": 20 }, { "epoch": 0.011983223487118035, "grad_norm": 2.5243964195251465, "learning_rate": 5.800000000000001e-07, "loss": 0.0215, "step": 30 }, { "epoch": 0.01597763131615738, "grad_norm": 2.4517173767089844, "learning_rate": 7.8e-07, "loss": 0.0229, "step": 40 }, { "epoch": 0.019972039145196723, "grad_norm": 1.1104662418365479, "learning_rate": 9.800000000000001e-07, "loss": 0.0212, "step": 50 }, { "epoch": 0.02396644697423607, "grad_norm": 3.2835190296173096, "learning_rate": 1.1800000000000001e-06, "loss": 0.0177, "step": 60 }, { "epoch": 0.027960854803275415, "grad_norm": 2.933971405029297, "learning_rate": 1.3800000000000001e-06, "loss": 0.0181, "step": 70 }, { "epoch": 0.03195526263231476, "grad_norm": 2.667775869369507, "learning_rate": 1.5800000000000001e-06, "loss": 0.0243, "step": 80 }, { "epoch": 0.035949670461354104, "grad_norm": 2.8503150939941406, "learning_rate": 1.7800000000000001e-06, "loss": 0.0158, "step": 90 }, { "epoch": 0.039944078290393446, "grad_norm": 2.6173582077026367, "learning_rate": 1.98e-06, "loss": 0.0217, "step": 100 }, { "epoch": 0.043938486119432796, "grad_norm": 2.148167371749878, "learning_rate": 2.1800000000000003e-06, "loss": 0.0216, "step": 110 }, { "epoch": 0.04793289394847214, "grad_norm": 1.6248950958251953, "learning_rate": 2.38e-06, "loss": 0.0211, "step": 120 }, { "epoch": 0.05192730177751148, "grad_norm": 2.5831820964813232, "learning_rate": 2.5800000000000003e-06, "loss": 0.0282, "step": 130 }, 
{ "epoch": 0.05592170960655083, "grad_norm": 2.1337149143218994, "learning_rate": 2.7800000000000005e-06, "loss": 0.0215, "step": 140 }, { "epoch": 0.05991611743559017, "grad_norm": 2.95695161819458, "learning_rate": 2.9800000000000003e-06, "loss": 0.0177, "step": 150 }, { "epoch": 0.06391052526462952, "grad_norm": 2.277665853500366, "learning_rate": 3.1800000000000005e-06, "loss": 0.0217, "step": 160 }, { "epoch": 0.06790493309366886, "grad_norm": 2.531909704208374, "learning_rate": 3.3800000000000007e-06, "loss": 0.0375, "step": 170 }, { "epoch": 0.07189934092270821, "grad_norm": 2.5161631107330322, "learning_rate": 3.58e-06, "loss": 0.0262, "step": 180 }, { "epoch": 0.07589374875174755, "grad_norm": 2.9737093448638916, "learning_rate": 3.7800000000000002e-06, "loss": 0.0237, "step": 190 }, { "epoch": 0.07988815658078689, "grad_norm": 2.2957046031951904, "learning_rate": 3.980000000000001e-06, "loss": 0.0211, "step": 200 }, { "epoch": 0.08388256440982625, "grad_norm": 1.9400784969329834, "learning_rate": 4.18e-06, "loss": 0.0241, "step": 210 }, { "epoch": 0.08787697223886559, "grad_norm": 3.7293314933776855, "learning_rate": 4.38e-06, "loss": 0.0263, "step": 220 }, { "epoch": 0.09187138006790493, "grad_norm": 4.266306400299072, "learning_rate": 4.58e-06, "loss": 0.0387, "step": 230 }, { "epoch": 0.09586578789694428, "grad_norm": 2.510282278060913, "learning_rate": 4.78e-06, "loss": 0.029, "step": 240 }, { "epoch": 0.09986019572598362, "grad_norm": 1.4938842058181763, "learning_rate": 4.980000000000001e-06, "loss": 0.0154, "step": 250 }, { "epoch": 0.10385460355502296, "grad_norm": 2.0076608657836914, "learning_rate": 5.18e-06, "loss": 0.0263, "step": 260 }, { "epoch": 0.10784901138406232, "grad_norm": 2.252916097640991, "learning_rate": 5.380000000000001e-06, "loss": 0.0273, "step": 270 }, { "epoch": 0.11184341921310166, "grad_norm": 0.9760869145393372, "learning_rate": 5.580000000000001e-06, "loss": 0.0227, "step": 280 }, { "epoch": 0.115837827042141, 
"grad_norm": 1.7485207319259644, "learning_rate": 5.78e-06, "loss": 0.0234, "step": 290 }, { "epoch": 0.11983223487118035, "grad_norm": 2.540903329849243, "learning_rate": 5.98e-06, "loss": 0.0216, "step": 300 }, { "epoch": 0.12382664270021969, "grad_norm": 2.970639228820801, "learning_rate": 6.18e-06, "loss": 0.0244, "step": 310 }, { "epoch": 0.12782105052925904, "grad_norm": 4.563722610473633, "learning_rate": 6.380000000000001e-06, "loss": 0.0332, "step": 320 }, { "epoch": 0.1318154583582984, "grad_norm": 3.670563220977783, "learning_rate": 6.5800000000000005e-06, "loss": 0.0259, "step": 330 }, { "epoch": 0.13580986618733773, "grad_norm": 3.6714515686035156, "learning_rate": 6.780000000000001e-06, "loss": 0.032, "step": 340 }, { "epoch": 0.13980427401637707, "grad_norm": 2.029430389404297, "learning_rate": 6.98e-06, "loss": 0.0264, "step": 350 }, { "epoch": 0.14379868184541642, "grad_norm": 2.609748363494873, "learning_rate": 7.180000000000001e-06, "loss": 0.0369, "step": 360 }, { "epoch": 0.14779308967445576, "grad_norm": 1.438359260559082, "learning_rate": 7.3800000000000005e-06, "loss": 0.0242, "step": 370 }, { "epoch": 0.1517874975034951, "grad_norm": 2.1897497177124023, "learning_rate": 7.58e-06, "loss": 0.02, "step": 380 }, { "epoch": 0.15578190533253444, "grad_norm": 1.9179186820983887, "learning_rate": 7.78e-06, "loss": 0.0366, "step": 390 }, { "epoch": 0.15977631316157379, "grad_norm": 2.6540606021881104, "learning_rate": 7.980000000000002e-06, "loss": 0.0253, "step": 400 }, { "epoch": 0.16377072099061313, "grad_norm": 2.5292742252349854, "learning_rate": 8.18e-06, "loss": 0.0356, "step": 410 }, { "epoch": 0.1677651288196525, "grad_norm": 2.4729959964752197, "learning_rate": 8.380000000000001e-06, "loss": 0.0327, "step": 420 }, { "epoch": 0.17175953664869184, "grad_norm": 3.639272689819336, "learning_rate": 8.580000000000001e-06, "loss": 0.032, "step": 430 }, { "epoch": 0.17575394447773118, "grad_norm": 3.2385575771331787, "learning_rate": 8.78e-06, 
"loss": 0.0359, "step": 440 }, { "epoch": 0.17974835230677053, "grad_norm": 3.476755142211914, "learning_rate": 8.98e-06, "loss": 0.0286, "step": 450 }, { "epoch": 0.18374276013580987, "grad_norm": 2.612086772918701, "learning_rate": 9.180000000000002e-06, "loss": 0.0228, "step": 460 }, { "epoch": 0.1877371679648492, "grad_norm": 3.5403027534484863, "learning_rate": 9.38e-06, "loss": 0.0243, "step": 470 }, { "epoch": 0.19173157579388855, "grad_norm": 2.7765207290649414, "learning_rate": 9.58e-06, "loss": 0.0348, "step": 480 }, { "epoch": 0.1957259836229279, "grad_norm": 3.589587450027466, "learning_rate": 9.780000000000001e-06, "loss": 0.0323, "step": 490 }, { "epoch": 0.19972039145196724, "grad_norm": 1.8719650506973267, "learning_rate": 9.980000000000001e-06, "loss": 0.0275, "step": 500 }, { "epoch": 0.20371479928100658, "grad_norm": 2.750931739807129, "learning_rate": 9.983636363636364e-06, "loss": 0.0348, "step": 510 }, { "epoch": 0.20770920711004592, "grad_norm": 1.9383965730667114, "learning_rate": 9.965454545454546e-06, "loss": 0.0282, "step": 520 }, { "epoch": 0.2117036149390853, "grad_norm": 3.6504414081573486, "learning_rate": 9.947272727272728e-06, "loss": 0.0316, "step": 530 }, { "epoch": 0.21569802276812464, "grad_norm": 3.26383376121521, "learning_rate": 9.92909090909091e-06, "loss": 0.0273, "step": 540 }, { "epoch": 0.21969243059716398, "grad_norm": 4.769904613494873, "learning_rate": 9.910909090909092e-06, "loss": 0.0295, "step": 550 }, { "epoch": 0.22368683842620332, "grad_norm": 1.143069863319397, "learning_rate": 9.892727272727273e-06, "loss": 0.027, "step": 560 }, { "epoch": 0.22768124625524266, "grad_norm": 2.988460063934326, "learning_rate": 9.874545454545455e-06, "loss": 0.0349, "step": 570 }, { "epoch": 0.231675654084282, "grad_norm": 2.6448323726654053, "learning_rate": 9.856363636363637e-06, "loss": 0.036, "step": 580 }, { "epoch": 0.23567006191332135, "grad_norm": 3.077092409133911, "learning_rate": 9.838181818181819e-06, "loss": 0.0491, 
"step": 590 }, { "epoch": 0.2396644697423607, "grad_norm": 6.214341163635254, "learning_rate": 9.820000000000001e-06, "loss": 0.0313, "step": 600 }, { "epoch": 0.24365887757140003, "grad_norm": 3.1780426502227783, "learning_rate": 9.801818181818183e-06, "loss": 0.0251, "step": 610 }, { "epoch": 0.24765328540043938, "grad_norm": 1.7672063112258911, "learning_rate": 9.783636363636365e-06, "loss": 0.0259, "step": 620 }, { "epoch": 0.2516476932294787, "grad_norm": 3.9317915439605713, "learning_rate": 9.765454545454546e-06, "loss": 0.0263, "step": 630 }, { "epoch": 0.2556421010585181, "grad_norm": 2.23710298538208, "learning_rate": 9.747272727272728e-06, "loss": 0.0335, "step": 640 }, { "epoch": 0.2596365088875574, "grad_norm": 3.24867844581604, "learning_rate": 9.72909090909091e-06, "loss": 0.0383, "step": 650 }, { "epoch": 0.2636309167165968, "grad_norm": 3.0995874404907227, "learning_rate": 9.710909090909092e-06, "loss": 0.0296, "step": 660 }, { "epoch": 0.2676253245456361, "grad_norm": 1.4767210483551025, "learning_rate": 9.692727272727274e-06, "loss": 0.04, "step": 670 }, { "epoch": 0.27161973237467546, "grad_norm": 1.7182132005691528, "learning_rate": 9.674545454545456e-06, "loss": 0.0294, "step": 680 }, { "epoch": 0.2756141402037148, "grad_norm": 3.0770034790039062, "learning_rate": 9.656363636363637e-06, "loss": 0.0393, "step": 690 }, { "epoch": 0.27960854803275415, "grad_norm": 2.452723503112793, "learning_rate": 9.63818181818182e-06, "loss": 0.0321, "step": 700 }, { "epoch": 0.2836029558617935, "grad_norm": 2.9560225009918213, "learning_rate": 9.620000000000001e-06, "loss": 0.0292, "step": 710 }, { "epoch": 0.28759736369083283, "grad_norm": 2.8095853328704834, "learning_rate": 9.601818181818183e-06, "loss": 0.028, "step": 720 }, { "epoch": 0.2915917715198722, "grad_norm": 3.4851746559143066, "learning_rate": 9.583636363636365e-06, "loss": 0.0296, "step": 730 }, { "epoch": 0.2955861793489115, "grad_norm": 4.2011027336120605, "learning_rate": 
9.565454545454547e-06, "loss": 0.0353, "step": 740 }, { "epoch": 0.2995805871779509, "grad_norm": 5.23146390914917, "learning_rate": 9.547272727272728e-06, "loss": 0.0393, "step": 750 }, { "epoch": 0.3035749950069902, "grad_norm": 2.8136441707611084, "learning_rate": 9.52909090909091e-06, "loss": 0.0339, "step": 760 }, { "epoch": 0.30756940283602957, "grad_norm": 3.0712337493896484, "learning_rate": 9.510909090909092e-06, "loss": 0.0414, "step": 770 }, { "epoch": 0.3115638106650689, "grad_norm": 2.1798360347747803, "learning_rate": 9.492727272727274e-06, "loss": 0.03, "step": 780 }, { "epoch": 0.31555821849410826, "grad_norm": 3.7893567085266113, "learning_rate": 9.474545454545456e-06, "loss": 0.0325, "step": 790 }, { "epoch": 0.31955262632314757, "grad_norm": 2.0599608421325684, "learning_rate": 9.456363636363638e-06, "loss": 0.0389, "step": 800 }, { "epoch": 0.32354703415218694, "grad_norm": 2.7613537311553955, "learning_rate": 9.438181818181818e-06, "loss": 0.0284, "step": 810 }, { "epoch": 0.32754144198122626, "grad_norm": 2.630587577819824, "learning_rate": 9.42e-06, "loss": 0.0308, "step": 820 }, { "epoch": 0.3315358498102656, "grad_norm": 2.8372304439544678, "learning_rate": 9.401818181818183e-06, "loss": 0.029, "step": 830 }, { "epoch": 0.335530257639305, "grad_norm": 2.21663236618042, "learning_rate": 9.383636363636365e-06, "loss": 0.0367, "step": 840 }, { "epoch": 0.3395246654683443, "grad_norm": 3.325446367263794, "learning_rate": 9.365454545454547e-06, "loss": 0.0256, "step": 850 }, { "epoch": 0.3435190732973837, "grad_norm": 2.7013704776763916, "learning_rate": 9.347272727272729e-06, "loss": 0.0279, "step": 860 }, { "epoch": 0.347513481126423, "grad_norm": 1.6712983846664429, "learning_rate": 9.32909090909091e-06, "loss": 0.0256, "step": 870 }, { "epoch": 0.35150788895546237, "grad_norm": 3.0498671531677246, "learning_rate": 9.310909090909092e-06, "loss": 0.0351, "step": 880 }, { "epoch": 0.3555022967845017, "grad_norm": 2.060898542404175, 
"learning_rate": 9.292727272727272e-06, "loss": 0.0349, "step": 890 }, { "epoch": 0.35949670461354105, "grad_norm": 2.191037178039551, "learning_rate": 9.274545454545454e-06, "loss": 0.0236, "step": 900 }, { "epoch": 0.36349111244258037, "grad_norm": 2.7442283630371094, "learning_rate": 9.256363636363636e-06, "loss": 0.0339, "step": 910 }, { "epoch": 0.36748552027161974, "grad_norm": 2.151813268661499, "learning_rate": 9.23818181818182e-06, "loss": 0.0272, "step": 920 }, { "epoch": 0.37147992810065905, "grad_norm": 3.0903069972991943, "learning_rate": 9.220000000000002e-06, "loss": 0.0343, "step": 930 }, { "epoch": 0.3754743359296984, "grad_norm": 3.2857322692871094, "learning_rate": 9.201818181818183e-06, "loss": 0.028, "step": 940 }, { "epoch": 0.3794687437587378, "grad_norm": 2.7144224643707275, "learning_rate": 9.183636363636365e-06, "loss": 0.0224, "step": 950 }, { "epoch": 0.3834631515877771, "grad_norm": 2.985253095626831, "learning_rate": 9.165454545454547e-06, "loss": 0.0297, "step": 960 }, { "epoch": 0.3874575594168165, "grad_norm": 2.6801540851593018, "learning_rate": 9.147272727272727e-06, "loss": 0.0306, "step": 970 }, { "epoch": 0.3914519672458558, "grad_norm": 2.944761276245117, "learning_rate": 9.129090909090909e-06, "loss": 0.0324, "step": 980 }, { "epoch": 0.39544637507489516, "grad_norm": 2.6776340007781982, "learning_rate": 9.11090909090909e-06, "loss": 0.04, "step": 990 }, { "epoch": 0.3994407829039345, "grad_norm": 4.007734775543213, "learning_rate": 9.092727272727273e-06, "loss": 0.0286, "step": 1000 }, { "epoch": 0.40343519073297385, "grad_norm": 3.652127265930176, "learning_rate": 9.074545454545455e-06, "loss": 0.0373, "step": 1010 }, { "epoch": 0.40742959856201316, "grad_norm": 1.8009936809539795, "learning_rate": 9.056363636363638e-06, "loss": 0.024, "step": 1020 }, { "epoch": 0.41142400639105253, "grad_norm": 2.0933914184570312, "learning_rate": 9.03818181818182e-06, "loss": 0.0341, "step": 1030 }, { "epoch": 0.41541841422009185, 
"grad_norm": 2.683140277862549, "learning_rate": 9.020000000000002e-06, "loss": 0.0473, "step": 1040 }, { "epoch": 0.4194128220491312, "grad_norm": 2.7582523822784424, "learning_rate": 9.001818181818182e-06, "loss": 0.0322, "step": 1050 }, { "epoch": 0.4234072298781706, "grad_norm": 3.6831445693969727, "learning_rate": 8.983636363636364e-06, "loss": 0.0382, "step": 1060 }, { "epoch": 0.4274016377072099, "grad_norm": 2.053619623184204, "learning_rate": 8.965454545454546e-06, "loss": 0.0304, "step": 1070 }, { "epoch": 0.4313960455362493, "grad_norm": 2.5573840141296387, "learning_rate": 8.947272727272727e-06, "loss": 0.0567, "step": 1080 }, { "epoch": 0.4353904533652886, "grad_norm": 3.5455336570739746, "learning_rate": 8.92909090909091e-06, "loss": 0.0338, "step": 1090 }, { "epoch": 0.43938486119432796, "grad_norm": 2.8842337131500244, "learning_rate": 8.910909090909091e-06, "loss": 0.0314, "step": 1100 }, { "epoch": 0.4433792690233673, "grad_norm": 2.779266834259033, "learning_rate": 8.892727272727275e-06, "loss": 0.0486, "step": 1110 }, { "epoch": 0.44737367685240664, "grad_norm": 3.959299325942993, "learning_rate": 8.874545454545456e-06, "loss": 0.0396, "step": 1120 }, { "epoch": 0.45136808468144596, "grad_norm": 2.053091526031494, "learning_rate": 8.856363636363637e-06, "loss": 0.0329, "step": 1130 }, { "epoch": 0.45536249251048533, "grad_norm": 1.1432011127471924, "learning_rate": 8.838181818181818e-06, "loss": 0.0268, "step": 1140 }, { "epoch": 0.45935690033952464, "grad_norm": 2.738510847091675, "learning_rate": 8.82e-06, "loss": 0.0388, "step": 1150 }, { "epoch": 0.463351308168564, "grad_norm": 1.9435960054397583, "learning_rate": 8.801818181818182e-06, "loss": 0.0321, "step": 1160 }, { "epoch": 0.46734571599760333, "grad_norm": 2.462301015853882, "learning_rate": 8.783636363636364e-06, "loss": 0.0344, "step": 1170 }, { "epoch": 0.4713401238266427, "grad_norm": 2.8463451862335205, "learning_rate": 8.765454545454546e-06, "loss": 0.0372, "step": 1180 }, { 
"epoch": 0.47533453165568207, "grad_norm": 4.407367706298828, "learning_rate": 8.747272727272728e-06, "loss": 0.0399, "step": 1190 }, { "epoch": 0.4793289394847214, "grad_norm": 2.5179121494293213, "learning_rate": 8.72909090909091e-06, "loss": 0.0331, "step": 1200 }, { "epoch": 0.48332334731376075, "grad_norm": 3.2438509464263916, "learning_rate": 8.710909090909091e-06, "loss": 0.0348, "step": 1210 }, { "epoch": 0.48731775514280007, "grad_norm": 2.54004168510437, "learning_rate": 8.692727272727273e-06, "loss": 0.0317, "step": 1220 }, { "epoch": 0.49131216297183944, "grad_norm": 2.8079185485839844, "learning_rate": 8.674545454545455e-06, "loss": 0.0386, "step": 1230 }, { "epoch": 0.49530657080087875, "grad_norm": 4.827033519744873, "learning_rate": 8.656363636363637e-06, "loss": 0.0338, "step": 1240 }, { "epoch": 0.4993009786299181, "grad_norm": 2.919968843460083, "learning_rate": 8.638181818181819e-06, "loss": 0.0265, "step": 1250 }, { "epoch": 0.5032953864589574, "grad_norm": 3.6935982704162598, "learning_rate": 8.62e-06, "loss": 0.0327, "step": 1260 }, { "epoch": 0.5072897942879968, "grad_norm": 4.243749141693115, "learning_rate": 8.601818181818182e-06, "loss": 0.0282, "step": 1270 }, { "epoch": 0.5112842021170362, "grad_norm": 3.979485273361206, "learning_rate": 8.583636363636364e-06, "loss": 0.0242, "step": 1280 }, { "epoch": 0.5152786099460755, "grad_norm": 3.32183575630188, "learning_rate": 8.565454545454546e-06, "loss": 0.0356, "step": 1290 }, { "epoch": 0.5192730177751148, "grad_norm": 2.474187135696411, "learning_rate": 8.547272727272728e-06, "loss": 0.0201, "step": 1300 }, { "epoch": 0.5232674256041542, "grad_norm": 3.150151252746582, "learning_rate": 8.52909090909091e-06, "loss": 0.0304, "step": 1310 }, { "epoch": 0.5272618334331935, "grad_norm": 2.039504289627075, "learning_rate": 8.510909090909092e-06, "loss": 0.045, "step": 1320 }, { "epoch": 0.5312562412622329, "grad_norm": 2.7388851642608643, "learning_rate": 8.492727272727273e-06, "loss": 0.0268, 
"step": 1330 }, { "epoch": 0.5352506490912722, "grad_norm": 2.9932122230529785, "learning_rate": 8.474545454545455e-06, "loss": 0.0331, "step": 1340 }, { "epoch": 0.5392450569203115, "grad_norm": 3.3818037509918213, "learning_rate": 8.456363636363637e-06, "loss": 0.0365, "step": 1350 }, { "epoch": 0.5432394647493509, "grad_norm": 3.251274585723877, "learning_rate": 8.438181818181819e-06, "loss": 0.0395, "step": 1360 }, { "epoch": 0.5472338725783903, "grad_norm": 3.46567964553833, "learning_rate": 8.42e-06, "loss": 0.0287, "step": 1370 }, { "epoch": 0.5512282804074295, "grad_norm": 2.459820032119751, "learning_rate": 8.401818181818183e-06, "loss": 0.0329, "step": 1380 }, { "epoch": 0.5552226882364689, "grad_norm": 1.735729694366455, "learning_rate": 8.383636363636364e-06, "loss": 0.0313, "step": 1390 }, { "epoch": 0.5592170960655083, "grad_norm": 2.2887370586395264, "learning_rate": 8.365454545454546e-06, "loss": 0.0215, "step": 1400 }, { "epoch": 0.5632115038945477, "grad_norm": 3.110576868057251, "learning_rate": 8.347272727272728e-06, "loss": 0.0213, "step": 1410 }, { "epoch": 0.567205911723587, "grad_norm": 3.1144895553588867, "learning_rate": 8.32909090909091e-06, "loss": 0.0312, "step": 1420 }, { "epoch": 0.5712003195526263, "grad_norm": 2.9777989387512207, "learning_rate": 8.310909090909092e-06, "loss": 0.0322, "step": 1430 }, { "epoch": 0.5751947273816657, "grad_norm": 2.2770936489105225, "learning_rate": 8.292727272727274e-06, "loss": 0.0304, "step": 1440 }, { "epoch": 0.579189135210705, "grad_norm": 3.5356180667877197, "learning_rate": 8.274545454545455e-06, "loss": 0.0347, "step": 1450 }, { "epoch": 0.5831835430397444, "grad_norm": 3.6384565830230713, "learning_rate": 8.256363636363637e-06, "loss": 0.0289, "step": 1460 }, { "epoch": 0.5871779508687837, "grad_norm": 3.053424119949341, "learning_rate": 8.238181818181819e-06, "loss": 0.0356, "step": 1470 }, { "epoch": 0.591172358697823, "grad_norm": 4.9105963706970215, "learning_rate": 8.220000000000001e-06, 
"loss": 0.0315, "step": 1480 }, { "epoch": 0.5951667665268624, "grad_norm": 3.0485212802886963, "learning_rate": 8.201818181818183e-06, "loss": 0.0298, "step": 1490 }, { "epoch": 0.5991611743559018, "grad_norm": 3.3632636070251465, "learning_rate": 8.183636363636365e-06, "loss": 0.0268, "step": 1500 }, { "epoch": 0.603155582184941, "grad_norm": 2.593235969543457, "learning_rate": 8.165454545454546e-06, "loss": 0.0286, "step": 1510 }, { "epoch": 0.6071499900139804, "grad_norm": 2.542865753173828, "learning_rate": 8.147272727272728e-06, "loss": 0.0317, "step": 1520 }, { "epoch": 0.6111443978430198, "grad_norm": 3.246321201324463, "learning_rate": 8.12909090909091e-06, "loss": 0.0358, "step": 1530 }, { "epoch": 0.6151388056720591, "grad_norm": 4.592155456542969, "learning_rate": 8.110909090909092e-06, "loss": 0.0295, "step": 1540 }, { "epoch": 0.6191332135010985, "grad_norm": 2.1040351390838623, "learning_rate": 8.092727272727274e-06, "loss": 0.044, "step": 1550 }, { "epoch": 0.6231276213301378, "grad_norm": 2.832470417022705, "learning_rate": 8.074545454545456e-06, "loss": 0.0335, "step": 1560 }, { "epoch": 0.6271220291591771, "grad_norm": 3.7737035751342773, "learning_rate": 8.056363636363636e-06, "loss": 0.0289, "step": 1570 }, { "epoch": 0.6311164369882165, "grad_norm": 2.9322657585144043, "learning_rate": 8.038181818181818e-06, "loss": 0.0209, "step": 1580 }, { "epoch": 0.6351108448172559, "grad_norm": 3.0506591796875, "learning_rate": 8.020000000000001e-06, "loss": 0.0277, "step": 1590 }, { "epoch": 0.6391052526462951, "grad_norm": 1.679126501083374, "learning_rate": 8.001818181818183e-06, "loss": 0.0313, "step": 1600 }, { "epoch": 0.6430996604753345, "grad_norm": 1.4025191068649292, "learning_rate": 7.983636363636365e-06, "loss": 0.0342, "step": 1610 }, { "epoch": 0.6470940683043739, "grad_norm": 3.2698376178741455, "learning_rate": 7.965454545454547e-06, "loss": 0.0334, "step": 1620 }, { "epoch": 0.6510884761334133, "grad_norm": 2.007560968399048, 
"learning_rate": 7.947272727272728e-06, "loss": 0.0361, "step": 1630 }, { "epoch": 0.6550828839624525, "grad_norm": 3.021299123764038, "learning_rate": 7.92909090909091e-06, "loss": 0.0316, "step": 1640 }, { "epoch": 0.6590772917914919, "grad_norm": 3.4559459686279297, "learning_rate": 7.91090909090909e-06, "loss": 0.0336, "step": 1650 }, { "epoch": 0.6630716996205313, "grad_norm": 3.786959648132324, "learning_rate": 7.892727272727272e-06, "loss": 0.0337, "step": 1660 }, { "epoch": 0.6670661074495706, "grad_norm": 2.5222368240356445, "learning_rate": 7.874545454545454e-06, "loss": 0.026, "step": 1670 }, { "epoch": 0.67106051527861, "grad_norm": 2.880535364151001, "learning_rate": 7.856363636363638e-06, "loss": 0.023, "step": 1680 }, { "epoch": 0.6750549231076493, "grad_norm": 3.375427007675171, "learning_rate": 7.83818181818182e-06, "loss": 0.025, "step": 1690 }, { "epoch": 0.6790493309366886, "grad_norm": 1.8976999521255493, "learning_rate": 7.820000000000001e-06, "loss": 0.0267, "step": 1700 }, { "epoch": 0.683043738765728, "grad_norm": 3.0969624519348145, "learning_rate": 7.801818181818183e-06, "loss": 0.0334, "step": 1710 }, { "epoch": 0.6870381465947674, "grad_norm": 2.949564218521118, "learning_rate": 7.783636363636365e-06, "loss": 0.0317, "step": 1720 }, { "epoch": 0.6910325544238066, "grad_norm": 3.1288681030273438, "learning_rate": 7.765454545454545e-06, "loss": 0.036, "step": 1730 }, { "epoch": 0.695026962252846, "grad_norm": 2.337353229522705, "learning_rate": 7.747272727272727e-06, "loss": 0.0233, "step": 1740 }, { "epoch": 0.6990213700818854, "grad_norm": 2.593578577041626, "learning_rate": 7.729090909090909e-06, "loss": 0.0338, "step": 1750 }, { "epoch": 0.7030157779109247, "grad_norm": 2.1465232372283936, "learning_rate": 7.71090909090909e-06, "loss": 0.0289, "step": 1760 }, { "epoch": 0.7070101857399641, "grad_norm": 2.932419538497925, "learning_rate": 7.692727272727273e-06, "loss": 0.0263, "step": 1770 }, { "epoch": 0.7110045935690034, "grad_norm": 
4.221918106079102, "learning_rate": 7.674545454545456e-06, "loss": 0.0262, "step": 1780 }, { "epoch": 0.7149990013980427, "grad_norm": 3.3574588298797607, "learning_rate": 7.656363636363638e-06, "loss": 0.0298, "step": 1790 }, { "epoch": 0.7189934092270821, "grad_norm": 3.61956787109375, "learning_rate": 7.63818181818182e-06, "loss": 0.0361, "step": 1800 }, { "epoch": 0.7229878170561215, "grad_norm": 3.8640527725219727, "learning_rate": 7.620000000000001e-06, "loss": 0.0379, "step": 1810 }, { "epoch": 0.7269822248851607, "grad_norm": 3.021359920501709, "learning_rate": 7.6018181818181826e-06, "loss": 0.0256, "step": 1820 }, { "epoch": 0.7309766327142001, "grad_norm": 3.4771692752838135, "learning_rate": 7.583636363636364e-06, "loss": 0.0317, "step": 1830 }, { "epoch": 0.7349710405432395, "grad_norm": 1.799800992012024, "learning_rate": 7.565454545454546e-06, "loss": 0.0318, "step": 1840 }, { "epoch": 0.7389654483722788, "grad_norm": 2.8619134426116943, "learning_rate": 7.547272727272727e-06, "loss": 0.0351, "step": 1850 }, { "epoch": 0.7429598562013181, "grad_norm": 1.9078686237335205, "learning_rate": 7.529090909090909e-06, "loss": 0.0223, "step": 1860 }, { "epoch": 0.7469542640303575, "grad_norm": 3.3508458137512207, "learning_rate": 7.510909090909092e-06, "loss": 0.0219, "step": 1870 }, { "epoch": 0.7509486718593968, "grad_norm": 2.5900681018829346, "learning_rate": 7.492727272727274e-06, "loss": 0.0231, "step": 1880 }, { "epoch": 0.7549430796884362, "grad_norm": 2.878042697906494, "learning_rate": 7.4745454545454554e-06, "loss": 0.0269, "step": 1890 }, { "epoch": 0.7589374875174756, "grad_norm": 2.814326524734497, "learning_rate": 7.456363636363637e-06, "loss": 0.027, "step": 1900 }, { "epoch": 0.7629318953465148, "grad_norm": 3.0135231018066406, "learning_rate": 7.438181818181819e-06, "loss": 0.0363, "step": 1910 }, { "epoch": 0.7669263031755542, "grad_norm": 4.630500793457031, "learning_rate": 7.420000000000001e-06, "loss": 0.0318, "step": 1920 }, { "epoch": 
0.7709207110045936, "grad_norm": 2.1792924404144287, "learning_rate": 7.401818181818182e-06, "loss": 0.0299, "step": 1930 }, { "epoch": 0.774915118833633, "grad_norm": 2.716294527053833, "learning_rate": 7.383636363636364e-06, "loss": 0.028, "step": 1940 }, { "epoch": 0.7789095266626722, "grad_norm": 2.591440439224243, "learning_rate": 7.365454545454546e-06, "loss": 0.0311, "step": 1950 }, { "epoch": 0.7829039344917116, "grad_norm": 1.3216569423675537, "learning_rate": 7.3472727272727275e-06, "loss": 0.0213, "step": 1960 }, { "epoch": 0.786898342320751, "grad_norm": 1.4774867296218872, "learning_rate": 7.32909090909091e-06, "loss": 0.0218, "step": 1970 }, { "epoch": 0.7908927501497903, "grad_norm": 2.8911545276641846, "learning_rate": 7.310909090909092e-06, "loss": 0.0251, "step": 1980 }, { "epoch": 0.7948871579788296, "grad_norm": 4.558359146118164, "learning_rate": 7.292727272727274e-06, "loss": 0.023, "step": 1990 }, { "epoch": 0.798881565807869, "grad_norm": 2.78124737739563, "learning_rate": 7.274545454545456e-06, "loss": 0.0361, "step": 2000 }, { "epoch": 0.798881565807869, "eval_loss": 0.017742320895195007, "eval_runtime": 7487.3562, "eval_samples_per_second": 2.675, "eval_steps_per_second": 0.334, "eval_wer": 1.5431796145591616, "step": 2000 }, { "epoch": 0.8028759736369083, "grad_norm": 3.3901844024658203, "learning_rate": 7.256363636363637e-06, "loss": 0.0319, "step": 2010 }, { "epoch": 0.8068703814659477, "grad_norm": 3.4345223903656006, "learning_rate": 7.2381818181818185e-06, "loss": 0.0313, "step": 2020 }, { "epoch": 0.8108647892949871, "grad_norm": 2.4429991245269775, "learning_rate": 7.22e-06, "loss": 0.0255, "step": 2030 }, { "epoch": 0.8148591971240263, "grad_norm": 1.69983971118927, "learning_rate": 7.201818181818182e-06, "loss": 0.0326, "step": 2040 }, { "epoch": 0.8188536049530657, "grad_norm": 1.1749235391616821, "learning_rate": 7.183636363636364e-06, "loss": 0.0265, "step": 2050 }, { "epoch": 0.8228480127821051, "grad_norm": 
3.216322422027588, "learning_rate": 7.165454545454547e-06, "loss": 0.0297, "step": 2060 }, { "epoch": 0.8268424206111444, "grad_norm": 2.813265562057495, "learning_rate": 7.1472727272727285e-06, "loss": 0.0284, "step": 2070 }, { "epoch": 0.8308368284401837, "grad_norm": 2.927987575531006, "learning_rate": 7.12909090909091e-06, "loss": 0.0397, "step": 2080 }, { "epoch": 0.8348312362692231, "grad_norm": 3.2884976863861084, "learning_rate": 7.110909090909091e-06, "loss": 0.0319, "step": 2090 }, { "epoch": 0.8388256440982624, "grad_norm": 2.057528495788574, "learning_rate": 7.092727272727273e-06, "loss": 0.0305, "step": 2100 }, { "epoch": 0.8428200519273018, "grad_norm": 2.372114658355713, "learning_rate": 7.074545454545455e-06, "loss": 0.03, "step": 2110 }, { "epoch": 0.8468144597563412, "grad_norm": 2.805551052093506, "learning_rate": 7.056363636363637e-06, "loss": 0.0305, "step": 2120 }, { "epoch": 0.8508088675853804, "grad_norm": 2.1154088973999023, "learning_rate": 7.038181818181819e-06, "loss": 0.026, "step": 2130 }, { "epoch": 0.8548032754144198, "grad_norm": 3.1023805141448975, "learning_rate": 7.0200000000000006e-06, "loss": 0.033, "step": 2140 }, { "epoch": 0.8587976832434592, "grad_norm": 2.8123090267181396, "learning_rate": 7.0018181818181815e-06, "loss": 0.0273, "step": 2150 }, { "epoch": 0.8627920910724985, "grad_norm": 2.0984227657318115, "learning_rate": 6.983636363636365e-06, "loss": 0.0279, "step": 2160 }, { "epoch": 0.8667864989015378, "grad_norm": 4.393746376037598, "learning_rate": 6.965454545454546e-06, "loss": 0.0318, "step": 2170 }, { "epoch": 0.8707809067305772, "grad_norm": 3.9992425441741943, "learning_rate": 6.947272727272728e-06, "loss": 0.0353, "step": 2180 }, { "epoch": 0.8747753145596165, "grad_norm": 1.945391297340393, "learning_rate": 6.92909090909091e-06, "loss": 0.0228, "step": 2190 }, { "epoch": 0.8787697223886559, "grad_norm": 3.7365753650665283, "learning_rate": 6.910909090909092e-06, "loss": 0.0299, "step": 2200 }, { "epoch": 
0.8827641302176952, "grad_norm": 2.2218515872955322, "learning_rate": 6.892727272727273e-06, "loss": 0.025, "step": 2210 }, { "epoch": 0.8867585380467345, "grad_norm": 1.6459863185882568, "learning_rate": 6.874545454545455e-06, "loss": 0.0272, "step": 2220 }, { "epoch": 0.8907529458757739, "grad_norm": 1.9987223148345947, "learning_rate": 6.856363636363636e-06, "loss": 0.0316, "step": 2230 }, { "epoch": 0.8947473537048133, "grad_norm": 1.8636083602905273, "learning_rate": 6.838181818181818e-06, "loss": 0.0296, "step": 2240 }, { "epoch": 0.8987417615338527, "grad_norm": 4.068820476531982, "learning_rate": 6.820000000000001e-06, "loss": 0.0377, "step": 2250 }, { "epoch": 0.9027361693628919, "grad_norm": 2.758310079574585, "learning_rate": 6.801818181818183e-06, "loss": 0.0255, "step": 2260 }, { "epoch": 0.9067305771919313, "grad_norm": 2.86716890335083, "learning_rate": 6.7836363636363644e-06, "loss": 0.0302, "step": 2270 }, { "epoch": 0.9107249850209707, "grad_norm": 2.4357964992523193, "learning_rate": 6.765454545454546e-06, "loss": 0.0302, "step": 2280 }, { "epoch": 0.91471939285001, "grad_norm": 2.8732433319091797, "learning_rate": 6.747272727272728e-06, "loss": 0.0289, "step": 2290 }, { "epoch": 0.9187138006790493, "grad_norm": 4.2874836921691895, "learning_rate": 6.72909090909091e-06, "loss": 0.0303, "step": 2300 }, { "epoch": 0.9227082085080887, "grad_norm": 2.6381747722625732, "learning_rate": 6.710909090909091e-06, "loss": 0.029, "step": 2310 }, { "epoch": 0.926702616337128, "grad_norm": 2.819223403930664, "learning_rate": 6.692727272727273e-06, "loss": 0.0261, "step": 2320 }, { "epoch": 0.9306970241661674, "grad_norm": 2.7608511447906494, "learning_rate": 6.674545454545455e-06, "loss": 0.025, "step": 2330 }, { "epoch": 0.9346914319952067, "grad_norm": 2.7692387104034424, "learning_rate": 6.6563636363636365e-06, "loss": 0.0247, "step": 2340 }, { "epoch": 0.938685839824246, "grad_norm": 3.5204415321350098, "learning_rate": 6.638181818181819e-06, "loss": 
0.0377, "step": 2350 }, { "epoch": 0.9426802476532854, "grad_norm": 3.3943400382995605, "learning_rate": 6.620000000000001e-06, "loss": 0.0315, "step": 2360 }, { "epoch": 0.9466746554823248, "grad_norm": 3.179027795791626, "learning_rate": 6.601818181818183e-06, "loss": 0.0269, "step": 2370 }, { "epoch": 0.9506690633113641, "grad_norm": 1.89017653465271, "learning_rate": 6.583636363636365e-06, "loss": 0.0294, "step": 2380 }, { "epoch": 0.9546634711404034, "grad_norm": 3.284336566925049, "learning_rate": 6.565454545454546e-06, "loss": 0.0395, "step": 2390 }, { "epoch": 0.9586578789694428, "grad_norm": 2.3796751499176025, "learning_rate": 6.5472727272727275e-06, "loss": 0.0337, "step": 2400 }, { "epoch": 0.9626522867984821, "grad_norm": 2.710698366165161, "learning_rate": 6.529090909090909e-06, "loss": 0.0276, "step": 2410 }, { "epoch": 0.9666466946275215, "grad_norm": null, "learning_rate": 6.510909090909091e-06, "loss": 0.0459, "step": 2420 }, { "epoch": 0.9706411024565608, "grad_norm": 3.24204421043396, "learning_rate": 6.492727272727273e-06, "loss": 0.0345, "step": 2430 }, { "epoch": 0.9746355102856001, "grad_norm": 4.349393844604492, "learning_rate": 6.474545454545456e-06, "loss": 0.0355, "step": 2440 }, { "epoch": 0.9786299181146395, "grad_norm": 2.544041395187378, "learning_rate": 6.4563636363636375e-06, "loss": 0.0326, "step": 2450 }, { "epoch": 0.9826243259436789, "grad_norm": 1.9386078119277954, "learning_rate": 6.438181818181819e-06, "loss": 0.028, "step": 2460 }, { "epoch": 0.9866187337727182, "grad_norm": 2.767906904220581, "learning_rate": 6.42e-06, "loss": 0.0244, "step": 2470 }, { "epoch": 0.9906131416017575, "grad_norm": 3.3528952598571777, "learning_rate": 6.401818181818182e-06, "loss": 0.03, "step": 2480 }, { "epoch": 0.9946075494307969, "grad_norm": 2.875234365463257, "learning_rate": 6.383636363636364e-06, "loss": 0.0323, "step": 2490 }, { "epoch": 0.9986019572598362, "grad_norm": 3.0124990940093994, "learning_rate": 6.365454545454546e-06, 
"loss": 0.0373, "step": 2500 }, { "epoch": 1.0023966446974235, "grad_norm": 1.3243356943130493, "learning_rate": 6.347272727272728e-06, "loss": 0.0143, "step": 2510 }, { "epoch": 1.006391052526463, "grad_norm": 1.5087229013442993, "learning_rate": 6.3290909090909096e-06, "loss": 0.0078, "step": 2520 }, { "epoch": 1.0103854603555023, "grad_norm": 2.5617003440856934, "learning_rate": 6.3109090909090905e-06, "loss": 0.0155, "step": 2530 }, { "epoch": 1.0143798681845417, "grad_norm": 0.9475475549697876, "learning_rate": 6.292727272727274e-06, "loss": 0.0091, "step": 2540 }, { "epoch": 1.018374276013581, "grad_norm": 1.712641716003418, "learning_rate": 6.274545454545455e-06, "loss": 0.013, "step": 2550 }, { "epoch": 1.0223686838426203, "grad_norm": 0.9213377237319946, "learning_rate": 6.256363636363637e-06, "loss": 0.009, "step": 2560 }, { "epoch": 1.0263630916716597, "grad_norm": 1.4909203052520752, "learning_rate": 6.238181818181819e-06, "loss": 0.0093, "step": 2570 }, { "epoch": 1.030357499500699, "grad_norm": 0.989854633808136, "learning_rate": 6.220000000000001e-06, "loss": 0.0068, "step": 2580 }, { "epoch": 1.0343519073297385, "grad_norm": 1.8446928262710571, "learning_rate": 6.2018181818181824e-06, "loss": 0.0125, "step": 2590 }, { "epoch": 1.0383463151587777, "grad_norm": 0.5228179693222046, "learning_rate": 6.183636363636364e-06, "loss": 0.0074, "step": 2600 }, { "epoch": 1.042340722987817, "grad_norm": 1.9997162818908691, "learning_rate": 6.165454545454545e-06, "loss": 0.0078, "step": 2610 }, { "epoch": 1.0463351308168565, "grad_norm": 0.7548797130584717, "learning_rate": 6.147272727272727e-06, "loss": 0.0132, "step": 2620 }, { "epoch": 1.0503295386458957, "grad_norm": 1.5827348232269287, "learning_rate": 6.12909090909091e-06, "loss": 0.0067, "step": 2630 }, { "epoch": 1.054323946474935, "grad_norm": 1.7578275203704834, "learning_rate": 6.110909090909092e-06, "loss": 0.012, "step": 2640 }, { "epoch": 1.0583183543039745, "grad_norm": 1.5093387365341187, 
"learning_rate": 6.0927272727272735e-06, "loss": 0.008, "step": 2650 }, { "epoch": 1.0623127621330137, "grad_norm": 2.2290687561035156, "learning_rate": 6.074545454545455e-06, "loss": 0.0055, "step": 2660 }, { "epoch": 1.0663071699620532, "grad_norm": 2.303945779800415, "learning_rate": 6.056363636363637e-06, "loss": 0.0099, "step": 2670 }, { "epoch": 1.0703015777910925, "grad_norm": 0.6369355320930481, "learning_rate": 6.038181818181819e-06, "loss": 0.0063, "step": 2680 }, { "epoch": 1.0742959856201317, "grad_norm": 0.5402636528015137, "learning_rate": 6.02e-06, "loss": 0.0113, "step": 2690 }, { "epoch": 1.0782903934491712, "grad_norm": 1.1112077236175537, "learning_rate": 6.001818181818182e-06, "loss": 0.0121, "step": 2700 }, { "epoch": 1.0822848012782105, "grad_norm": 1.1533619165420532, "learning_rate": 5.983636363636364e-06, "loss": 0.0093, "step": 2710 }, { "epoch": 1.08627920910725, "grad_norm": 0.7646775245666504, "learning_rate": 5.965454545454546e-06, "loss": 0.0115, "step": 2720 }, { "epoch": 1.0902736169362892, "grad_norm": 0.8578062057495117, "learning_rate": 5.947272727272728e-06, "loss": 0.0086, "step": 2730 }, { "epoch": 1.0942680247653285, "grad_norm": 3.2737674713134766, "learning_rate": 5.92909090909091e-06, "loss": 0.0073, "step": 2740 }, { "epoch": 1.098262432594368, "grad_norm": 2.4335756301879883, "learning_rate": 5.910909090909092e-06, "loss": 0.0101, "step": 2750 }, { "epoch": 1.1022568404234072, "grad_norm": 1.416908860206604, "learning_rate": 5.892727272727274e-06, "loss": 0.0072, "step": 2760 }, { "epoch": 1.1062512482524465, "grad_norm": 0.8457365036010742, "learning_rate": 5.874545454545455e-06, "loss": 0.0088, "step": 2770 }, { "epoch": 1.110245656081486, "grad_norm": 1.1250872611999512, "learning_rate": 5.8563636363636365e-06, "loss": 0.0064, "step": 2780 }, { "epoch": 1.1142400639105252, "grad_norm": 1.291493535041809, "learning_rate": 5.838181818181818e-06, "loss": 0.0099, "step": 2790 }, { "epoch": 1.1182344717395647, "grad_norm": 
0.5688849091529846, "learning_rate": 5.82e-06, "loss": 0.0085, "step": 2800 }, { "epoch": 1.122228879568604, "grad_norm": 1.1171317100524902, "learning_rate": 5.801818181818182e-06, "loss": 0.0067, "step": 2810 }, { "epoch": 1.1262232873976432, "grad_norm": 2.204411506652832, "learning_rate": 5.783636363636365e-06, "loss": 0.0097, "step": 2820 }, { "epoch": 1.1302176952266827, "grad_norm": 0.7467852830886841, "learning_rate": 5.7654545454545465e-06, "loss": 0.0069, "step": 2830 }, { "epoch": 1.134212103055722, "grad_norm": 1.518353819847107, "learning_rate": 5.747272727272728e-06, "loss": 0.0103, "step": 2840 }, { "epoch": 1.1382065108847614, "grad_norm": 3.4078903198242188, "learning_rate": 5.729090909090909e-06, "loss": 0.0136, "step": 2850 }, { "epoch": 1.1422009187138007, "grad_norm": 0.7704038619995117, "learning_rate": 5.710909090909091e-06, "loss": 0.006, "step": 2860 }, { "epoch": 1.14619532654284, "grad_norm": 0.5476235151290894, "learning_rate": 5.692727272727273e-06, "loss": 0.0069, "step": 2870 }, { "epoch": 1.1501897343718794, "grad_norm": 0.3564358353614807, "learning_rate": 5.674545454545455e-06, "loss": 0.0122, "step": 2880 }, { "epoch": 1.1541841422009187, "grad_norm": 1.082381010055542, "learning_rate": 5.656363636363637e-06, "loss": 0.0078, "step": 2890 }, { "epoch": 1.158178550029958, "grad_norm": 1.2779443264007568, "learning_rate": 5.6381818181818186e-06, "loss": 0.0101, "step": 2900 }, { "epoch": 1.1621729578589974, "grad_norm": 1.3106609582901, "learning_rate": 5.620000000000001e-06, "loss": 0.0099, "step": 2910 }, { "epoch": 1.1661673656880367, "grad_norm": 1.0210970640182495, "learning_rate": 5.601818181818183e-06, "loss": 0.0081, "step": 2920 }, { "epoch": 1.1701617735170762, "grad_norm": 2.3123538494110107, "learning_rate": 5.583636363636364e-06, "loss": 0.0072, "step": 2930 }, { "epoch": 1.1741561813461154, "grad_norm": 1.2326884269714355, "learning_rate": 5.565454545454546e-06, "loss": 0.0072, "step": 2940 }, { "epoch": 
1.1781505891751547, "grad_norm": 1.6470218896865845, "learning_rate": 5.547272727272728e-06, "loss": 0.0117, "step": 2950 }, { "epoch": 1.1821449970041942, "grad_norm": 1.5907762050628662, "learning_rate": 5.52909090909091e-06, "loss": 0.0096, "step": 2960 }, { "epoch": 1.1861394048332334, "grad_norm": 1.7873153686523438, "learning_rate": 5.5109090909090914e-06, "loss": 0.0076, "step": 2970 }, { "epoch": 1.190133812662273, "grad_norm": 0.8777304291725159, "learning_rate": 5.492727272727273e-06, "loss": 0.0094, "step": 2980 }, { "epoch": 1.1941282204913122, "grad_norm": 1.556298851966858, "learning_rate": 5.474545454545454e-06, "loss": 0.0098, "step": 2990 }, { "epoch": 1.1981226283203514, "grad_norm": 2.023460865020752, "learning_rate": 5.456363636363636e-06, "loss": 0.0103, "step": 3000 }, { "epoch": 1.202117036149391, "grad_norm": 0.8088117837905884, "learning_rate": 5.438181818181819e-06, "loss": 0.0091, "step": 3010 }, { "epoch": 1.2061114439784302, "grad_norm": 1.834544062614441, "learning_rate": 5.420000000000001e-06, "loss": 0.0071, "step": 3020 }, { "epoch": 1.2101058518074694, "grad_norm": 1.0826184749603271, "learning_rate": 5.4018181818181825e-06, "loss": 0.007, "step": 3030 }, { "epoch": 1.214100259636509, "grad_norm": 1.198883056640625, "learning_rate": 5.383636363636364e-06, "loss": 0.0074, "step": 3040 }, { "epoch": 1.2180946674655482, "grad_norm": 1.476663589477539, "learning_rate": 5.365454545454546e-06, "loss": 0.0084, "step": 3050 }, { "epoch": 1.2220890752945877, "grad_norm": 2.082937717437744, "learning_rate": 5.347272727272728e-06, "loss": 0.0081, "step": 3060 }, { "epoch": 1.226083483123627, "grad_norm": 1.7027249336242676, "learning_rate": 5.329090909090909e-06, "loss": 0.0055, "step": 3070 }, { "epoch": 1.2300778909526662, "grad_norm": 1.8248317241668701, "learning_rate": 5.310909090909091e-06, "loss": 0.0106, "step": 3080 }, { "epoch": 1.2340722987817057, "grad_norm": 0.6236146092414856, "learning_rate": 5.292727272727273e-06, "loss": 
0.0064, "step": 3090 }, { "epoch": 1.238066706610745, "grad_norm": 1.895790934562683, "learning_rate": 5.274545454545455e-06, "loss": 0.0062, "step": 3100 }, { "epoch": 1.2420611144397844, "grad_norm": 2.1608922481536865, "learning_rate": 5.256363636363637e-06, "loss": 0.0077, "step": 3110 }, { "epoch": 1.2460555222688237, "grad_norm": 0.884495735168457, "learning_rate": 5.238181818181819e-06, "loss": 0.005, "step": 3120 }, { "epoch": 1.250049930097863, "grad_norm": 1.3650517463684082, "learning_rate": 5.220000000000001e-06, "loss": 0.0071, "step": 3130 }, { "epoch": 1.2540443379269024, "grad_norm": 1.8378918170928955, "learning_rate": 5.201818181818183e-06, "loss": 0.0078, "step": 3140 }, { "epoch": 1.2580387457559417, "grad_norm": 0.3236289322376251, "learning_rate": 5.183636363636364e-06, "loss": 0.0106, "step": 3150 }, { "epoch": 1.262033153584981, "grad_norm": 1.896931529045105, "learning_rate": 5.1654545454545455e-06, "loss": 0.0103, "step": 3160 }, { "epoch": 1.2660275614140204, "grad_norm": 0.38708317279815674, "learning_rate": 5.147272727272727e-06, "loss": 0.0101, "step": 3170 }, { "epoch": 1.2700219692430597, "grad_norm": 0.8924418687820435, "learning_rate": 5.129090909090909e-06, "loss": 0.0084, "step": 3180 }, { "epoch": 1.2740163770720991, "grad_norm": 1.9992389678955078, "learning_rate": 5.110909090909091e-06, "loss": 0.008, "step": 3190 }, { "epoch": 1.2780107849011384, "grad_norm": 6.68002462387085, "learning_rate": 5.092727272727274e-06, "loss": 0.0079, "step": 3200 }, { "epoch": 1.2820051927301779, "grad_norm": 0.4097543954849243, "learning_rate": 5.0745454545454555e-06, "loss": 0.007, "step": 3210 }, { "epoch": 1.2859996005592171, "grad_norm": 2.126607656478882, "learning_rate": 5.056363636363637e-06, "loss": 0.0085, "step": 3220 }, { "epoch": 1.2899940083882564, "grad_norm": 0.7770309448242188, "learning_rate": 5.038181818181818e-06, "loss": 0.0092, "step": 3230 }, { "epoch": 1.2939884162172959, "grad_norm": 1.09266996383667, "learning_rate": 
5.02e-06, "loss": 0.0154, "step": 3240 }, { "epoch": 1.2979828240463351, "grad_norm": 0.644081175327301, "learning_rate": 5.001818181818182e-06, "loss": 0.0109, "step": 3250 }, { "epoch": 1.3019772318753744, "grad_norm": 0.40339845418930054, "learning_rate": 4.983636363636364e-06, "loss": 0.0136, "step": 3260 }, { "epoch": 1.3059716397044139, "grad_norm": 1.0117156505584717, "learning_rate": 4.965454545454546e-06, "loss": 0.0079, "step": 3270 }, { "epoch": 1.3099660475334531, "grad_norm": 0.6380696296691895, "learning_rate": 4.9472727272727276e-06, "loss": 0.0055, "step": 3280 }, { "epoch": 1.3139604553624924, "grad_norm": 2.3137929439544678, "learning_rate": 4.929090909090909e-06, "loss": 0.0081, "step": 3290 }, { "epoch": 1.3179548631915319, "grad_norm": 1.307806372642517, "learning_rate": 4.910909090909091e-06, "loss": 0.0107, "step": 3300 }, { "epoch": 1.3219492710205711, "grad_norm": 1.1640592813491821, "learning_rate": 4.892727272727273e-06, "loss": 0.0071, "step": 3310 }, { "epoch": 1.3259436788496106, "grad_norm": 0.8527657389640808, "learning_rate": 4.874545454545455e-06, "loss": 0.0075, "step": 3320 }, { "epoch": 1.3299380866786499, "grad_norm": 1.5638917684555054, "learning_rate": 4.856363636363637e-06, "loss": 0.0077, "step": 3330 }, { "epoch": 1.3339324945076894, "grad_norm": 3.4176924228668213, "learning_rate": 4.838181818181819e-06, "loss": 0.0134, "step": 3340 }, { "epoch": 1.3379269023367286, "grad_norm": 1.353973388671875, "learning_rate": 4.8200000000000004e-06, "loss": 0.0063, "step": 3350 }, { "epoch": 1.3419213101657679, "grad_norm": 1.280344843864441, "learning_rate": 4.801818181818182e-06, "loss": 0.0131, "step": 3360 }, { "epoch": 1.3459157179948074, "grad_norm": 1.0167405605316162, "learning_rate": 4.783636363636364e-06, "loss": 0.0091, "step": 3370 }, { "epoch": 1.3499101258238466, "grad_norm": 0.347282350063324, "learning_rate": 4.765454545454546e-06, "loss": 0.0066, "step": 3380 }, { "epoch": 1.3539045336528859, "grad_norm": 
0.9135898351669312, "learning_rate": 4.747272727272728e-06, "loss": 0.0093, "step": 3390 }, { "epoch": 1.3578989414819254, "grad_norm": 0.4328613877296448, "learning_rate": 4.72909090909091e-06, "loss": 0.0089, "step": 3400 }, { "epoch": 1.3618933493109646, "grad_norm": 0.7179979681968689, "learning_rate": 4.7109090909090915e-06, "loss": 0.0078, "step": 3410 }, { "epoch": 1.3658877571400039, "grad_norm": 1.748172402381897, "learning_rate": 4.692727272727273e-06, "loss": 0.0062, "step": 3420 }, { "epoch": 1.3698821649690434, "grad_norm": 0.30844706296920776, "learning_rate": 4.674545454545455e-06, "loss": 0.0041, "step": 3430 }, { "epoch": 1.3738765727980826, "grad_norm": 1.167373538017273, "learning_rate": 4.656363636363637e-06, "loss": 0.008, "step": 3440 }, { "epoch": 1.377870980627122, "grad_norm": 1.5947142839431763, "learning_rate": 4.638181818181818e-06, "loss": 0.0067, "step": 3450 }, { "epoch": 1.3818653884561614, "grad_norm": 1.5891377925872803, "learning_rate": 4.620000000000001e-06, "loss": 0.007, "step": 3460 }, { "epoch": 1.3858597962852008, "grad_norm": 2.0473077297210693, "learning_rate": 4.6018181818181825e-06, "loss": 0.0096, "step": 3470 }, { "epoch": 1.38985420411424, "grad_norm": 2.16921067237854, "learning_rate": 4.583636363636364e-06, "loss": 0.0084, "step": 3480 }, { "epoch": 1.3938486119432794, "grad_norm": 2.0252652168273926, "learning_rate": 4.565454545454545e-06, "loss": 0.0081, "step": 3490 }, { "epoch": 1.3978430197723188, "grad_norm": 1.2650831937789917, "learning_rate": 4.547272727272727e-06, "loss": 0.0065, "step": 3500 }, { "epoch": 1.401837427601358, "grad_norm": 1.4343613386154175, "learning_rate": 4.52909090909091e-06, "loss": 0.0067, "step": 3510 }, { "epoch": 1.4058318354303974, "grad_norm": 0.9264397025108337, "learning_rate": 4.510909090909092e-06, "loss": 0.0084, "step": 3520 }, { "epoch": 1.4098262432594368, "grad_norm": 0.47705429792404175, "learning_rate": 4.492727272727273e-06, "loss": 0.0147, "step": 3530 }, { "epoch": 
1.413820651088476, "grad_norm": 0.33390942215919495, "learning_rate": 4.4745454545454545e-06, "loss": 0.0067, "step": 3540 }, { "epoch": 1.4178150589175154, "grad_norm": 0.7790824174880981, "learning_rate": 4.456363636363637e-06, "loss": 0.0073, "step": 3550 }, { "epoch": 1.4218094667465548, "grad_norm": 1.8825984001159668, "learning_rate": 4.438181818181819e-06, "loss": 0.0119, "step": 3560 }, { "epoch": 1.425803874575594, "grad_norm": 2.6809818744659424, "learning_rate": 4.42e-06, "loss": 0.0076, "step": 3570 }, { "epoch": 1.4297982824046336, "grad_norm": 1.4570934772491455, "learning_rate": 4.401818181818182e-06, "loss": 0.0073, "step": 3580 }, { "epoch": 1.4337926902336728, "grad_norm": 0.7681152820587158, "learning_rate": 4.383636363636364e-06, "loss": 0.0044, "step": 3590 }, { "epoch": 1.4377870980627123, "grad_norm": 0.2887667715549469, "learning_rate": 4.365454545454546e-06, "loss": 0.0058, "step": 3600 }, { "epoch": 1.4417815058917516, "grad_norm": 1.9216909408569336, "learning_rate": 4.347272727272727e-06, "loss": 0.0079, "step": 3610 }, { "epoch": 1.4457759137207908, "grad_norm": 1.959386944770813, "learning_rate": 4.329090909090909e-06, "loss": 0.007, "step": 3620 }, { "epoch": 1.4497703215498303, "grad_norm": 1.421069860458374, "learning_rate": 4.310909090909091e-06, "loss": 0.0064, "step": 3630 }, { "epoch": 1.4537647293788696, "grad_norm": 1.9688565731048584, "learning_rate": 4.292727272727273e-06, "loss": 0.0065, "step": 3640 }, { "epoch": 1.4577591372079088, "grad_norm": 0.46297717094421387, "learning_rate": 4.274545454545455e-06, "loss": 0.0045, "step": 3650 }, { "epoch": 1.4617535450369483, "grad_norm": 0.6084499359130859, "learning_rate": 4.256363636363637e-06, "loss": 0.0063, "step": 3660 }, { "epoch": 1.4657479528659876, "grad_norm": 2.4733595848083496, "learning_rate": 4.238181818181818e-06, "loss": 0.0082, "step": 3670 }, { "epoch": 1.4697423606950268, "grad_norm": 0.8938449025154114, "learning_rate": 4.22e-06, "loss": 0.0056, "step": 3680 
}, { "epoch": 1.4737367685240663, "grad_norm": 1.3058514595031738, "learning_rate": 4.201818181818182e-06, "loss": 0.0079, "step": 3690 }, { "epoch": 1.4777311763531056, "grad_norm": 0.42573487758636475, "learning_rate": 4.183636363636364e-06, "loss": 0.0083, "step": 3700 }, { "epoch": 1.481725584182145, "grad_norm": 2.336344003677368, "learning_rate": 4.165454545454546e-06, "loss": 0.0105, "step": 3710 }, { "epoch": 1.4857199920111843, "grad_norm": 0.4930185079574585, "learning_rate": 4.147272727272728e-06, "loss": 0.0083, "step": 3720 }, { "epoch": 1.4897143998402238, "grad_norm": 1.1534149646759033, "learning_rate": 4.1290909090909094e-06, "loss": 0.0057, "step": 3730 }, { "epoch": 1.493708807669263, "grad_norm": 1.0539729595184326, "learning_rate": 4.110909090909091e-06, "loss": 0.0075, "step": 3740 }, { "epoch": 1.4977032154983023, "grad_norm": 1.9535585641860962, "learning_rate": 4.092727272727273e-06, "loss": 0.0081, "step": 3750 }, { "epoch": 1.5016976233273418, "grad_norm": 1.5772063732147217, "learning_rate": 4.074545454545455e-06, "loss": 0.0074, "step": 3760 }, { "epoch": 1.505692031156381, "grad_norm": 1.9321742057800293, "learning_rate": 4.056363636363637e-06, "loss": 0.0076, "step": 3770 }, { "epoch": 1.5096864389854203, "grad_norm": 1.391487717628479, "learning_rate": 4.038181818181819e-06, "loss": 0.0054, "step": 3780 }, { "epoch": 1.5136808468144598, "grad_norm": 0.29940593242645264, "learning_rate": 4.0200000000000005e-06, "loss": 0.0075, "step": 3790 }, { "epoch": 1.517675254643499, "grad_norm": 0.7316534519195557, "learning_rate": 4.001818181818182e-06, "loss": 0.0059, "step": 3800 }, { "epoch": 1.5216696624725383, "grad_norm": 0.8725746273994446, "learning_rate": 3.983636363636364e-06, "loss": 0.0082, "step": 3810 }, { "epoch": 1.5256640703015778, "grad_norm": 1.1659518480300903, "learning_rate": 3.965454545454546e-06, "loss": 0.0056, "step": 3820 }, { "epoch": 1.5296584781306173, "grad_norm": 0.9450637698173523, "learning_rate": 
3.947272727272727e-06, "loss": 0.0078, "step": 3830 }, { "epoch": 1.5336528859596563, "grad_norm": 0.7942542433738708, "learning_rate": 3.92909090909091e-06, "loss": 0.0056, "step": 3840 }, { "epoch": 1.5376472937886958, "grad_norm": 2.9248387813568115, "learning_rate": 3.9109090909090915e-06, "loss": 0.0065, "step": 3850 }, { "epoch": 1.5416417016177353, "grad_norm": 0.9460102319717407, "learning_rate": 3.892727272727273e-06, "loss": 0.0063, "step": 3860 }, { "epoch": 1.5456361094467745, "grad_norm": 1.5365135669708252, "learning_rate": 3.874545454545454e-06, "loss": 0.0091, "step": 3870 }, { "epoch": 1.5496305172758138, "grad_norm": 1.6475716829299927, "learning_rate": 3.856363636363636e-06, "loss": 0.005, "step": 3880 }, { "epoch": 1.5536249251048533, "grad_norm": 0.8353217244148254, "learning_rate": 3.838181818181819e-06, "loss": 0.0043, "step": 3890 }, { "epoch": 1.5576193329338925, "grad_norm": 0.36758750677108765, "learning_rate": 3.820000000000001e-06, "loss": 0.004, "step": 3900 }, { "epoch": 1.5616137407629318, "grad_norm": 0.2298198938369751, "learning_rate": 3.801818181818182e-06, "loss": 0.0084, "step": 3910 }, { "epoch": 1.5656081485919713, "grad_norm": 1.170965313911438, "learning_rate": 3.783636363636364e-06, "loss": 0.0067, "step": 3920 }, { "epoch": 1.5696025564210105, "grad_norm": 0.3110823631286621, "learning_rate": 3.765454545454546e-06, "loss": 0.0072, "step": 3930 }, { "epoch": 1.5735969642500498, "grad_norm": 1.0501469373703003, "learning_rate": 3.7472727272727276e-06, "loss": 0.0101, "step": 3940 }, { "epoch": 1.5775913720790893, "grad_norm": 1.3563507795333862, "learning_rate": 3.7290909090909095e-06, "loss": 0.0063, "step": 3950 }, { "epoch": 1.5815857799081288, "grad_norm": 1.415356159210205, "learning_rate": 3.7109090909090913e-06, "loss": 0.0048, "step": 3960 }, { "epoch": 1.5855801877371678, "grad_norm": 1.6954106092453003, "learning_rate": 3.6927272727272727e-06, "loss": 0.0083, "step": 3970 }, { "epoch": 1.5895745955662073, 
"grad_norm": 0.9005348086357117, "learning_rate": 3.674545454545455e-06, "loss": 0.0093, "step": 3980 }, { "epoch": 1.5935690033952468, "grad_norm": 0.7900477647781372, "learning_rate": 3.656363636363637e-06, "loss": 0.0087, "step": 3990 }, { "epoch": 1.597563411224286, "grad_norm": 1.1566689014434814, "learning_rate": 3.6381818181818187e-06, "loss": 0.0072, "step": 4000 }, { "epoch": 1.597563411224286, "eval_loss": 0.009391536004841328, "eval_runtime": 7517.9662, "eval_samples_per_second": 2.664, "eval_steps_per_second": 0.333, "eval_wer": 0.7816670479676657, "step": 4000 } ], "logging_steps": 10, "max_steps": 6000, "num_input_tokens_seen": 0, "num_train_epochs": 3, "save_steps": 2000, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 3.693085084975104e+19, "train_batch_size": 16, "trial_name": null, "trial_params": null }