{
  "best_global_step": 6000,
  "best_metric": 0.4967807301369415,
  "best_model_checkpoint": "./SALAMA_NEW8/checkpoint-6000",
  "epoch": 2.396245256640703,
  "eval_steps": 2000,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003994407829039345,
      "grad_norm": 2.2177462577819824,
      "learning_rate": 1.8e-07,
      "loss": 0.022,
      "step": 10
    },
    {
      "epoch": 0.00798881565807869,
      "grad_norm": 2.724672794342041,
      "learning_rate": 3.8e-07,
      "loss": 0.0249,
      "step": 20
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 2.5243964195251465,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0215,
      "step": 30
    },
    {
      "epoch": 0.01597763131615738,
      "grad_norm": 2.4517173767089844,
      "learning_rate": 7.8e-07,
      "loss": 0.0229,
      "step": 40
    },
    {
      "epoch": 0.019972039145196723,
      "grad_norm": 1.1104662418365479,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0212,
      "step": 50
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 3.2835190296173096,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0177,
      "step": 60
    },
    {
      "epoch": 0.027960854803275415,
      "grad_norm": 2.933971405029297,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0181,
      "step": 70
    },
    {
      "epoch": 0.03195526263231476,
      "grad_norm": 2.667775869369507,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0243,
      "step": 80
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 2.8503150939941406,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0158,
      "step": 90
    },
    {
      "epoch": 0.039944078290393446,
      "grad_norm": 2.6173582077026367,
      "learning_rate": 1.98e-06,
      "loss": 0.0217,
      "step": 100
    },
    {
      "epoch": 0.043938486119432796,
      "grad_norm": 2.148167371749878,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0216,
      "step": 110
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 1.6248950958251953,
      "learning_rate": 2.38e-06,
      "loss": 0.0211,
      "step": 120
    },
    {
      "epoch": 0.05192730177751148,
      "grad_norm": 2.5831820964813232,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0282,
      "step": 130
    },
    {
      "epoch": 0.05592170960655083,
      "grad_norm": 2.1337149143218994,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0215,
      "step": 140
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 2.95695161819458,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0177,
      "step": 150
    },
    {
      "epoch": 0.06391052526462952,
      "grad_norm": 2.277665853500366,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0217,
      "step": 160
    },
    {
      "epoch": 0.06790493309366886,
      "grad_norm": 2.531909704208374,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.0375,
      "step": 170
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 2.5161631107330322,
      "learning_rate": 3.58e-06,
      "loss": 0.0262,
      "step": 180
    },
    {
      "epoch": 0.07589374875174755,
      "grad_norm": 2.9737093448638916,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0237,
      "step": 190
    },
    {
      "epoch": 0.07988815658078689,
      "grad_norm": 2.2957046031951904,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0211,
      "step": 200
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 1.9400784969329834,
      "learning_rate": 4.18e-06,
      "loss": 0.0241,
      "step": 210
    },
    {
      "epoch": 0.08787697223886559,
      "grad_norm": 3.7293314933776855,
      "learning_rate": 4.38e-06,
      "loss": 0.0263,
      "step": 220
    },
    {
      "epoch": 0.09187138006790493,
      "grad_norm": 4.266306400299072,
      "learning_rate": 4.58e-06,
      "loss": 0.0387,
      "step": 230
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 2.510282278060913,
      "learning_rate": 4.78e-06,
      "loss": 0.029,
      "step": 240
    },
    {
      "epoch": 0.09986019572598362,
      "grad_norm": 1.4938842058181763,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0154,
      "step": 250
    },
    {
      "epoch": 0.10385460355502296,
      "grad_norm": 2.0076608657836914,
      "learning_rate": 5.18e-06,
      "loss": 0.0263,
      "step": 260
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 2.252916097640991,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0273,
      "step": 270
    },
    {
      "epoch": 0.11184341921310166,
      "grad_norm": 0.9760869145393372,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0227,
      "step": 280
    },
    {
      "epoch": 0.115837827042141,
      "grad_norm": 1.7485207319259644,
      "learning_rate": 5.78e-06,
      "loss": 0.0234,
      "step": 290
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 2.540903329849243,
      "learning_rate": 5.98e-06,
      "loss": 0.0216,
      "step": 300
    },
    {
      "epoch": 0.12382664270021969,
      "grad_norm": 2.970639228820801,
      "learning_rate": 6.18e-06,
      "loss": 0.0244,
      "step": 310
    },
    {
      "epoch": 0.12782105052925904,
      "grad_norm": 4.563722610473633,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0332,
      "step": 320
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 3.670563220977783,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0259,
      "step": 330
    },
    {
      "epoch": 0.13580986618733773,
      "grad_norm": 3.6714515686035156,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.032,
      "step": 340
    },
    {
      "epoch": 0.13980427401637707,
      "grad_norm": 2.029430389404297,
      "learning_rate": 6.98e-06,
      "loss": 0.0264,
      "step": 350
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 2.609748363494873,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0369,
      "step": 360
    },
    {
      "epoch": 0.14779308967445576,
      "grad_norm": 1.438359260559082,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0242,
      "step": 370
    },
    {
      "epoch": 0.1517874975034951,
      "grad_norm": 2.1897497177124023,
      "learning_rate": 7.58e-06,
      "loss": 0.02,
      "step": 380
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 1.9179186820983887,
      "learning_rate": 7.78e-06,
      "loss": 0.0366,
      "step": 390
    },
    {
      "epoch": 0.15977631316157379,
      "grad_norm": 2.6540606021881104,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0253,
      "step": 400
    },
    {
      "epoch": 0.16377072099061313,
      "grad_norm": 2.5292742252349854,
      "learning_rate": 8.18e-06,
      "loss": 0.0356,
      "step": 410
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 2.4729959964752197,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0327,
      "step": 420
    },
    {
      "epoch": 0.17175953664869184,
      "grad_norm": 3.639272689819336,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.032,
      "step": 430
    },
    {
      "epoch": 0.17575394447773118,
      "grad_norm": 3.2385575771331787,
      "learning_rate": 8.78e-06,
      "loss": 0.0359,
      "step": 440
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 3.476755142211914,
      "learning_rate": 8.98e-06,
      "loss": 0.0286,
      "step": 450
    },
    {
      "epoch": 0.18374276013580987,
      "grad_norm": 2.612086772918701,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0228,
      "step": 460
    },
    {
      "epoch": 0.1877371679648492,
      "grad_norm": 3.5403027534484863,
      "learning_rate": 9.38e-06,
      "loss": 0.0243,
      "step": 470
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 2.7765207290649414,
      "learning_rate": 9.58e-06,
      "loss": 0.0348,
      "step": 480
    },
    {
      "epoch": 0.1957259836229279,
      "grad_norm": 3.589587450027466,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0323,
      "step": 490
    },
    {
      "epoch": 0.19972039145196724,
      "grad_norm": 1.8719650506973267,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0275,
      "step": 500
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 2.750931739807129,
      "learning_rate": 9.983636363636364e-06,
      "loss": 0.0348,
      "step": 510
    },
    {
      "epoch": 0.20770920711004592,
      "grad_norm": 1.9383965730667114,
      "learning_rate": 9.965454545454546e-06,
      "loss": 0.0282,
      "step": 520
    },
    {
      "epoch": 0.2117036149390853,
      "grad_norm": 3.6504414081573486,
      "learning_rate": 9.947272727272728e-06,
      "loss": 0.0316,
      "step": 530
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 3.26383376121521,
      "learning_rate": 9.92909090909091e-06,
      "loss": 0.0273,
      "step": 540
    },
    {
      "epoch": 0.21969243059716398,
      "grad_norm": 4.769904613494873,
      "learning_rate": 9.910909090909092e-06,
      "loss": 0.0295,
      "step": 550
    },
    {
      "epoch": 0.22368683842620332,
      "grad_norm": 1.143069863319397,
      "learning_rate": 9.892727272727273e-06,
      "loss": 0.027,
      "step": 560
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 2.988460063934326,
      "learning_rate": 9.874545454545455e-06,
      "loss": 0.0349,
      "step": 570
    },
    {
      "epoch": 0.231675654084282,
      "grad_norm": 2.6448323726654053,
      "learning_rate": 9.856363636363637e-06,
      "loss": 0.036,
      "step": 580
    },
    {
      "epoch": 0.23567006191332135,
      "grad_norm": 3.077092409133911,
      "learning_rate": 9.838181818181819e-06,
      "loss": 0.0491,
      "step": 590
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 6.214341163635254,
      "learning_rate": 9.820000000000001e-06,
      "loss": 0.0313,
      "step": 600
    },
    {
      "epoch": 0.24365887757140003,
      "grad_norm": 3.1780426502227783,
      "learning_rate": 9.801818181818183e-06,
      "loss": 0.0251,
      "step": 610
    },
    {
      "epoch": 0.24765328540043938,
      "grad_norm": 1.7672063112258911,
      "learning_rate": 9.783636363636365e-06,
      "loss": 0.0259,
      "step": 620
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 3.9317915439605713,
      "learning_rate": 9.765454545454546e-06,
      "loss": 0.0263,
      "step": 630
    },
    {
      "epoch": 0.2556421010585181,
      "grad_norm": 2.23710298538208,
      "learning_rate": 9.747272727272728e-06,
      "loss": 0.0335,
      "step": 640
    },
    {
      "epoch": 0.2596365088875574,
      "grad_norm": 3.24867844581604,
      "learning_rate": 9.72909090909091e-06,
      "loss": 0.0383,
      "step": 650
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 3.0995874404907227,
      "learning_rate": 9.710909090909092e-06,
      "loss": 0.0296,
      "step": 660
    },
    {
      "epoch": 0.2676253245456361,
      "grad_norm": 1.4767210483551025,
      "learning_rate": 9.692727272727274e-06,
      "loss": 0.04,
      "step": 670
    },
    {
      "epoch": 0.27161973237467546,
      "grad_norm": 1.7182132005691528,
      "learning_rate": 9.674545454545456e-06,
      "loss": 0.0294,
      "step": 680
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 3.0770034790039062,
      "learning_rate": 9.656363636363637e-06,
      "loss": 0.0393,
      "step": 690
    },
    {
      "epoch": 0.27960854803275415,
      "grad_norm": 2.452723503112793,
      "learning_rate": 9.63818181818182e-06,
      "loss": 0.0321,
      "step": 700
    },
    {
      "epoch": 0.2836029558617935,
      "grad_norm": 2.9560225009918213,
      "learning_rate": 9.620000000000001e-06,
      "loss": 0.0292,
      "step": 710
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 2.8095853328704834,
      "learning_rate": 9.601818181818183e-06,
      "loss": 0.028,
      "step": 720
    },
    {
      "epoch": 0.2915917715198722,
      "grad_norm": 3.4851746559143066,
      "learning_rate": 9.583636363636365e-06,
      "loss": 0.0296,
      "step": 730
    },
    {
      "epoch": 0.2955861793489115,
      "grad_norm": 4.2011027336120605,
      "learning_rate": 9.565454545454547e-06,
      "loss": 0.0353,
      "step": 740
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 5.23146390914917,
      "learning_rate": 9.547272727272728e-06,
      "loss": 0.0393,
      "step": 750
    },
    {
      "epoch": 0.3035749950069902,
      "grad_norm": 2.8136441707611084,
      "learning_rate": 9.52909090909091e-06,
      "loss": 0.0339,
      "step": 760
    },
    {
      "epoch": 0.30756940283602957,
      "grad_norm": 3.0712337493896484,
      "learning_rate": 9.510909090909092e-06,
      "loss": 0.0414,
      "step": 770
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 2.1798360347747803,
      "learning_rate": 9.492727272727274e-06,
      "loss": 0.03,
      "step": 780
    },
    {
      "epoch": 0.31555821849410826,
      "grad_norm": 3.7893567085266113,
      "learning_rate": 9.474545454545456e-06,
      "loss": 0.0325,
      "step": 790
    },
    {
      "epoch": 0.31955262632314757,
      "grad_norm": 2.0599608421325684,
      "learning_rate": 9.456363636363638e-06,
      "loss": 0.0389,
      "step": 800
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 2.7613537311553955,
      "learning_rate": 9.438181818181818e-06,
      "loss": 0.0284,
      "step": 810
    },
    {
      "epoch": 0.32754144198122626,
      "grad_norm": 2.630587577819824,
      "learning_rate": 9.42e-06,
      "loss": 0.0308,
      "step": 820
    },
    {
      "epoch": 0.3315358498102656,
      "grad_norm": 2.8372304439544678,
      "learning_rate": 9.401818181818183e-06,
      "loss": 0.029,
      "step": 830
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 2.21663236618042,
      "learning_rate": 9.383636363636365e-06,
      "loss": 0.0367,
      "step": 840
    },
    {
      "epoch": 0.3395246654683443,
      "grad_norm": 3.325446367263794,
      "learning_rate": 9.365454545454547e-06,
      "loss": 0.0256,
      "step": 850
    },
    {
      "epoch": 0.3435190732973837,
      "grad_norm": 2.7013704776763916,
      "learning_rate": 9.347272727272729e-06,
      "loss": 0.0279,
      "step": 860
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 1.6712983846664429,
      "learning_rate": 9.32909090909091e-06,
      "loss": 0.0256,
      "step": 870
    },
    {
      "epoch": 0.35150788895546237,
      "grad_norm": 3.0498671531677246,
      "learning_rate": 9.310909090909092e-06,
      "loss": 0.0351,
      "step": 880
    },
    {
      "epoch": 0.3555022967845017,
      "grad_norm": 2.060898542404175,
      "learning_rate": 9.292727272727272e-06,
      "loss": 0.0349,
      "step": 890
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 2.191037178039551,
      "learning_rate": 9.274545454545454e-06,
      "loss": 0.0236,
      "step": 900
    },
    {
      "epoch": 0.36349111244258037,
      "grad_norm": 2.7442283630371094,
      "learning_rate": 9.256363636363636e-06,
      "loss": 0.0339,
      "step": 910
    },
    {
      "epoch": 0.36748552027161974,
      "grad_norm": 2.151813268661499,
      "learning_rate": 9.23818181818182e-06,
      "loss": 0.0272,
      "step": 920
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 3.0903069972991943,
      "learning_rate": 9.220000000000002e-06,
      "loss": 0.0343,
      "step": 930
    },
    {
      "epoch": 0.3754743359296984,
      "grad_norm": 3.2857322692871094,
      "learning_rate": 9.201818181818183e-06,
      "loss": 0.028,
      "step": 940
    },
    {
      "epoch": 0.3794687437587378,
      "grad_norm": 2.7144224643707275,
      "learning_rate": 9.183636363636365e-06,
      "loss": 0.0224,
      "step": 950
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 2.985253095626831,
      "learning_rate": 9.165454545454547e-06,
      "loss": 0.0297,
      "step": 960
    },
    {
      "epoch": 0.3874575594168165,
      "grad_norm": 2.6801540851593018,
      "learning_rate": 9.147272727272727e-06,
      "loss": 0.0306,
      "step": 970
    },
    {
      "epoch": 0.3914519672458558,
      "grad_norm": 2.944761276245117,
      "learning_rate": 9.129090909090909e-06,
      "loss": 0.0324,
      "step": 980
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 2.6776340007781982,
      "learning_rate": 9.11090909090909e-06,
      "loss": 0.04,
      "step": 990
    },
    {
      "epoch": 0.3994407829039345,
      "grad_norm": 4.007734775543213,
      "learning_rate": 9.092727272727273e-06,
      "loss": 0.0286,
      "step": 1000
    },
    {
      "epoch": 0.40343519073297385,
      "grad_norm": 3.652127265930176,
      "learning_rate": 9.074545454545455e-06,
      "loss": 0.0373,
      "step": 1010
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 1.8009936809539795,
      "learning_rate": 9.056363636363638e-06,
      "loss": 0.024,
      "step": 1020
    },
    {
      "epoch": 0.41142400639105253,
      "grad_norm": 2.0933914184570312,
      "learning_rate": 9.03818181818182e-06,
      "loss": 0.0341,
      "step": 1030
    },
    {
      "epoch": 0.41541841422009185,
      "grad_norm": 2.683140277862549,
      "learning_rate": 9.020000000000002e-06,
      "loss": 0.0473,
      "step": 1040
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 2.7582523822784424,
      "learning_rate": 9.001818181818182e-06,
      "loss": 0.0322,
      "step": 1050
    },
    {
      "epoch": 0.4234072298781706,
      "grad_norm": 3.6831445693969727,
      "learning_rate": 8.983636363636364e-06,
      "loss": 0.0382,
      "step": 1060
    },
    {
      "epoch": 0.4274016377072099,
      "grad_norm": 2.053619623184204,
      "learning_rate": 8.965454545454546e-06,
      "loss": 0.0304,
      "step": 1070
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 2.5573840141296387,
      "learning_rate": 8.947272727272727e-06,
      "loss": 0.0567,
      "step": 1080
    },
    {
      "epoch": 0.4353904533652886,
      "grad_norm": 3.5455336570739746,
      "learning_rate": 8.92909090909091e-06,
      "loss": 0.0338,
      "step": 1090
    },
    {
      "epoch": 0.43938486119432796,
      "grad_norm": 2.8842337131500244,
      "learning_rate": 8.910909090909091e-06,
      "loss": 0.0314,
      "step": 1100
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 2.779266834259033,
      "learning_rate": 8.892727272727275e-06,
      "loss": 0.0486,
      "step": 1110
    },
    {
      "epoch": 0.44737367685240664,
      "grad_norm": 3.959299325942993,
      "learning_rate": 8.874545454545456e-06,
      "loss": 0.0396,
      "step": 1120
    },
    {
      "epoch": 0.45136808468144596,
      "grad_norm": 2.053091526031494,
      "learning_rate": 8.856363636363637e-06,
      "loss": 0.0329,
      "step": 1130
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 1.1432011127471924,
      "learning_rate": 8.838181818181818e-06,
      "loss": 0.0268,
      "step": 1140
    },
    {
      "epoch": 0.45935690033952464,
      "grad_norm": 2.738510847091675,
      "learning_rate": 8.82e-06,
      "loss": 0.0388,
      "step": 1150
    },
    {
      "epoch": 0.463351308168564,
      "grad_norm": 1.9435960054397583,
      "learning_rate": 8.801818181818182e-06,
      "loss": 0.0321,
      "step": 1160
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 2.462301015853882,
      "learning_rate": 8.783636363636364e-06,
      "loss": 0.0344,
      "step": 1170
    },
    {
      "epoch": 0.4713401238266427,
      "grad_norm": 2.8463451862335205,
      "learning_rate": 8.765454545454546e-06,
      "loss": 0.0372,
      "step": 1180
    },
    {
      "epoch": 0.47533453165568207,
      "grad_norm": 4.407367706298828,
      "learning_rate": 8.747272727272728e-06,
      "loss": 0.0399,
      "step": 1190
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 2.5179121494293213,
      "learning_rate": 8.72909090909091e-06,
      "loss": 0.0331,
      "step": 1200
    },
    {
      "epoch": 0.48332334731376075,
      "grad_norm": 3.2438509464263916,
      "learning_rate": 8.710909090909091e-06,
      "loss": 0.0348,
      "step": 1210
    },
    {
      "epoch": 0.48731775514280007,
      "grad_norm": 2.54004168510437,
      "learning_rate": 8.692727272727273e-06,
      "loss": 0.0317,
      "step": 1220
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 2.8079185485839844,
      "learning_rate": 8.674545454545455e-06,
      "loss": 0.0386,
      "step": 1230
    },
    {
      "epoch": 0.49530657080087875,
      "grad_norm": 4.827033519744873,
      "learning_rate": 8.656363636363637e-06,
      "loss": 0.0338,
      "step": 1240
    },
    {
      "epoch": 0.4993009786299181,
      "grad_norm": 2.919968843460083,
      "learning_rate": 8.638181818181819e-06,
      "loss": 0.0265,
      "step": 1250
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 3.6935982704162598,
      "learning_rate": 8.62e-06,
      "loss": 0.0327,
      "step": 1260
    },
    {
      "epoch": 0.5072897942879968,
      "grad_norm": 4.243749141693115,
      "learning_rate": 8.601818181818182e-06,
      "loss": 0.0282,
      "step": 1270
    },
    {
      "epoch": 0.5112842021170362,
      "grad_norm": 3.979485273361206,
      "learning_rate": 8.583636363636364e-06,
      "loss": 0.0242,
      "step": 1280
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 3.32183575630188,
      "learning_rate": 8.565454545454546e-06,
      "loss": 0.0356,
      "step": 1290
    },
    {
      "epoch": 0.5192730177751148,
      "grad_norm": 2.474187135696411,
      "learning_rate": 8.547272727272728e-06,
      "loss": 0.0201,
      "step": 1300
    },
    {
      "epoch": 0.5232674256041542,
      "grad_norm": 3.150151252746582,
      "learning_rate": 8.52909090909091e-06,
      "loss": 0.0304,
      "step": 1310
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 2.039504289627075,
      "learning_rate": 8.510909090909092e-06,
      "loss": 0.045,
      "step": 1320
    },
    {
      "epoch": 0.5312562412622329,
      "grad_norm": 2.7388851642608643,
      "learning_rate": 8.492727272727273e-06,
      "loss": 0.0268,
      "step": 1330
    },
    {
      "epoch": 0.5352506490912722,
      "grad_norm": 2.9932122230529785,
      "learning_rate": 8.474545454545455e-06,
      "loss": 0.0331,
      "step": 1340
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 3.3818037509918213,
      "learning_rate": 8.456363636363637e-06,
      "loss": 0.0365,
      "step": 1350
    },
    {
      "epoch": 0.5432394647493509,
      "grad_norm": 3.251274585723877,
      "learning_rate": 8.438181818181819e-06,
      "loss": 0.0395,
      "step": 1360
    },
    {
      "epoch": 0.5472338725783903,
      "grad_norm": 3.46567964553833,
      "learning_rate": 8.42e-06,
      "loss": 0.0287,
      "step": 1370
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 2.459820032119751,
      "learning_rate": 8.401818181818183e-06,
      "loss": 0.0329,
      "step": 1380
    },
    {
      "epoch": 0.5552226882364689,
      "grad_norm": 1.735729694366455,
      "learning_rate": 8.383636363636364e-06,
      "loss": 0.0313,
      "step": 1390
    },
    {
      "epoch": 0.5592170960655083,
      "grad_norm": 2.2887370586395264,
      "learning_rate": 8.365454545454546e-06,
      "loss": 0.0215,
      "step": 1400
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 3.110576868057251,
      "learning_rate": 8.347272727272728e-06,
      "loss": 0.0213,
      "step": 1410
    },
    {
      "epoch": 0.567205911723587,
      "grad_norm": 3.1144895553588867,
      "learning_rate": 8.32909090909091e-06,
      "loss": 0.0312,
      "step": 1420
    },
    {
      "epoch": 0.5712003195526263,
      "grad_norm": 2.9777989387512207,
      "learning_rate": 8.310909090909092e-06,
      "loss": 0.0322,
      "step": 1430
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 2.2770936489105225,
      "learning_rate": 8.292727272727274e-06,
      "loss": 0.0304,
      "step": 1440
    },
    {
      "epoch": 0.579189135210705,
      "grad_norm": 3.5356180667877197,
      "learning_rate": 8.274545454545455e-06,
      "loss": 0.0347,
      "step": 1450
    },
    {
      "epoch": 0.5831835430397444,
      "grad_norm": 3.6384565830230713,
      "learning_rate": 8.256363636363637e-06,
      "loss": 0.0289,
      "step": 1460
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 3.053424119949341,
      "learning_rate": 8.238181818181819e-06,
      "loss": 0.0356,
      "step": 1470
    },
    {
      "epoch": 0.591172358697823,
      "grad_norm": 4.9105963706970215,
      "learning_rate": 8.220000000000001e-06,
      "loss": 0.0315,
      "step": 1480
    },
    {
      "epoch": 0.5951667665268624,
      "grad_norm": 3.0485212802886963,
      "learning_rate": 8.201818181818183e-06,
      "loss": 0.0298,
      "step": 1490
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 3.3632636070251465,
      "learning_rate": 8.183636363636365e-06,
      "loss": 0.0268,
      "step": 1500
    },
    {
      "epoch": 0.603155582184941,
      "grad_norm": 2.593235969543457,
      "learning_rate": 8.165454545454546e-06,
      "loss": 0.0286,
      "step": 1510
    },
    {
      "epoch": 0.6071499900139804,
      "grad_norm": 2.542865753173828,
      "learning_rate": 8.147272727272728e-06,
      "loss": 0.0317,
      "step": 1520
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 3.246321201324463,
      "learning_rate": 8.12909090909091e-06,
      "loss": 0.0358,
      "step": 1530
    },
    {
      "epoch": 0.6151388056720591,
      "grad_norm": 4.592155456542969,
      "learning_rate": 8.110909090909092e-06,
      "loss": 0.0295,
      "step": 1540
    },
    {
      "epoch": 0.6191332135010985,
      "grad_norm": 2.1040351390838623,
      "learning_rate": 8.092727272727274e-06,
      "loss": 0.044,
      "step": 1550
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 2.832470417022705,
      "learning_rate": 8.074545454545456e-06,
      "loss": 0.0335,
      "step": 1560
    },
    {
      "epoch": 0.6271220291591771,
      "grad_norm": 3.7737035751342773,
      "learning_rate": 8.056363636363636e-06,
      "loss": 0.0289,
      "step": 1570
    },
    {
      "epoch": 0.6311164369882165,
      "grad_norm": 2.9322657585144043,
      "learning_rate": 8.038181818181818e-06,
      "loss": 0.0209,
      "step": 1580
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 3.0506591796875,
      "learning_rate": 8.020000000000001e-06,
      "loss": 0.0277,
      "step": 1590
    },
    {
      "epoch": 0.6391052526462951,
      "grad_norm": 1.679126501083374,
      "learning_rate": 8.001818181818183e-06,
      "loss": 0.0313,
      "step": 1600
    },
    {
      "epoch": 0.6430996604753345,
      "grad_norm": 1.4025191068649292,
      "learning_rate": 7.983636363636365e-06,
      "loss": 0.0342,
      "step": 1610
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 3.2698376178741455,
      "learning_rate": 7.965454545454547e-06,
      "loss": 0.0334,
      "step": 1620
    },
    {
      "epoch": 0.6510884761334133,
      "grad_norm": 2.007560968399048,
      "learning_rate": 7.947272727272728e-06,
      "loss": 0.0361,
      "step": 1630
    },
    {
      "epoch": 0.6550828839624525,
      "grad_norm": 3.021299123764038,
      "learning_rate": 7.92909090909091e-06,
      "loss": 0.0316,
      "step": 1640
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 3.4559459686279297,
      "learning_rate": 7.91090909090909e-06,
      "loss": 0.0336,
      "step": 1650
    },
    {
      "epoch": 0.6630716996205313,
      "grad_norm": 3.786959648132324,
      "learning_rate": 7.892727272727272e-06,
      "loss": 0.0337,
      "step": 1660
    },
    {
      "epoch": 0.6670661074495706,
      "grad_norm": 2.5222368240356445,
      "learning_rate": 7.874545454545454e-06,
      "loss": 0.026,
      "step": 1670
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 2.880535364151001,
      "learning_rate": 7.856363636363638e-06,
      "loss": 0.023,
      "step": 1680
    },
    {
      "epoch": 0.6750549231076493,
      "grad_norm": 3.375427007675171,
      "learning_rate": 7.83818181818182e-06,
      "loss": 0.025,
      "step": 1690
    },
    {
      "epoch": 0.6790493309366886,
      "grad_norm": 1.8976999521255493,
      "learning_rate": 7.820000000000001e-06,
      "loss": 0.0267,
      "step": 1700
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 3.0969624519348145,
      "learning_rate": 7.801818181818183e-06,
      "loss": 0.0334,
      "step": 1710
    },
    {
      "epoch": 0.6870381465947674,
      "grad_norm": 2.949564218521118,
      "learning_rate": 7.783636363636365e-06,
      "loss": 0.0317,
      "step": 1720
    },
    {
      "epoch": 0.6910325544238066,
      "grad_norm": 3.1288681030273438,
      "learning_rate": 7.765454545454545e-06,
      "loss": 0.036,
      "step": 1730
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 2.337353229522705,
      "learning_rate": 7.747272727272727e-06,
      "loss": 0.0233,
      "step": 1740
    },
    {
      "epoch": 0.6990213700818854,
      "grad_norm": 2.593578577041626,
      "learning_rate": 7.729090909090909e-06,
      "loss": 0.0338,
      "step": 1750
    },
    {
      "epoch": 0.7030157779109247,
      "grad_norm": 2.1465232372283936,
      "learning_rate": 7.71090909090909e-06,
      "loss": 0.0289,
      "step": 1760
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 2.932419538497925,
      "learning_rate": 7.692727272727273e-06,
      "loss": 0.0263,
      "step": 1770
    },
    {
      "epoch": 0.7110045935690034,
      "grad_norm": 4.221918106079102,
      "learning_rate": 7.674545454545456e-06,
      "loss": 0.0262,
      "step": 1780
    },
    {
      "epoch": 0.7149990013980427,
      "grad_norm": 3.3574588298797607,
      "learning_rate": 7.656363636363638e-06,
      "loss": 0.0298,
      "step": 1790
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 3.61956787109375,
      "learning_rate": 7.63818181818182e-06,
      "loss": 0.0361,
      "step": 1800
    },
    {
      "epoch": 0.7229878170561215,
      "grad_norm": 3.8640527725219727,
      "learning_rate": 7.620000000000001e-06,
      "loss": 0.0379,
      "step": 1810
    },
    {
      "epoch": 0.7269822248851607,
      "grad_norm": 3.021359920501709,
      "learning_rate": 7.6018181818181826e-06,
      "loss": 0.0256,
      "step": 1820
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 3.4771692752838135,
      "learning_rate": 7.583636363636364e-06,
      "loss": 0.0317,
      "step": 1830
    },
    {
      "epoch": 0.7349710405432395,
      "grad_norm": 1.799800992012024,
      "learning_rate": 7.565454545454546e-06,
      "loss": 0.0318,
      "step": 1840
    },
    {
      "epoch": 0.7389654483722788,
      "grad_norm": 2.8619134426116943,
      "learning_rate": 7.547272727272727e-06,
      "loss": 0.0351,
      "step": 1850
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 1.9078686237335205,
      "learning_rate": 7.529090909090909e-06,
      "loss": 0.0223,
      "step": 1860
    },
    {
      "epoch": 0.7469542640303575,
      "grad_norm": 3.3508458137512207,
      "learning_rate": 7.510909090909092e-06,
      "loss": 0.0219,
      "step": 1870
    },
    {
      "epoch": 0.7509486718593968,
      "grad_norm": 2.5900681018829346,
      "learning_rate": 7.492727272727274e-06,
      "loss": 0.0231,
      "step": 1880
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 2.878042697906494,
      "learning_rate": 7.4745454545454554e-06,
      "loss": 0.0269,
      "step": 1890
    },
    {
      "epoch": 0.7589374875174756,
      "grad_norm": 2.814326524734497,
      "learning_rate": 7.456363636363637e-06,
      "loss": 0.027,
      "step": 1900
    },
    {
      "epoch": 0.7629318953465148,
      "grad_norm": 3.0135231018066406,
      "learning_rate": 7.438181818181819e-06,
      "loss": 0.0363,
      "step": 1910
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 4.630500793457031,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.0318,
      "step": 1920
    },
    {
      "epoch": 0.7709207110045936,
      "grad_norm": 2.1792924404144287,
      "learning_rate": 7.401818181818182e-06,
      "loss": 0.0299,
      "step": 1930
    },
    {
      "epoch": 0.774915118833633,
      "grad_norm": 2.716294527053833,
      "learning_rate": 7.383636363636364e-06,
      "loss": 0.028,
      "step": 1940
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 2.591440439224243,
      "learning_rate": 7.365454545454546e-06,
      "loss": 0.0311,
      "step": 1950
    },
    {
      "epoch": 0.7829039344917116,
      "grad_norm": 1.3216569423675537,
      "learning_rate": 7.3472727272727275e-06,
      "loss": 0.0213,
      "step": 1960
    },
    {
      "epoch": 0.786898342320751,
      "grad_norm": 1.4774867296218872,
      "learning_rate": 7.32909090909091e-06,
      "loss": 0.0218,
      "step": 1970
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 2.8911545276641846,
      "learning_rate": 7.310909090909092e-06,
      "loss": 0.0251,
      "step": 1980
    },
    {
      "epoch": 0.7948871579788296,
      "grad_norm": 4.558359146118164,
      "learning_rate": 7.292727272727274e-06,
      "loss": 0.023,
      "step": 1990
    },
    {
      "epoch": 0.798881565807869,
      "grad_norm": 2.78124737739563,
      "learning_rate": 7.274545454545456e-06,
      "loss": 0.0361,
      "step": 2000
    },
    {
      "epoch": 0.798881565807869,
      "eval_loss": 0.017742320895195007,
      "eval_runtime": 7487.3562,
      "eval_samples_per_second": 2.675,
      "eval_steps_per_second": 0.334,
      "eval_wer": 1.5431796145591616,
      "step": 2000
    },
    {
      "epoch": 0.8028759736369083,
      "grad_norm": 3.3901844024658203,
      "learning_rate": 7.256363636363637e-06,
      "loss": 0.0319,
      "step": 2010
    },
    {
      "epoch": 0.8068703814659477,
      "grad_norm": 3.4345223903656006,
      "learning_rate": 7.2381818181818185e-06,
      "loss": 0.0313,
      "step": 2020
    },
    {
      "epoch": 0.8108647892949871,
      "grad_norm": 2.4429991245269775,
      "learning_rate": 7.22e-06,
      "loss": 0.0255,
      "step": 2030
    },
    {
      "epoch": 0.8148591971240263,
      "grad_norm": 1.69983971118927,
      "learning_rate": 7.201818181818182e-06,
      "loss": 0.0326,
      "step": 2040
    },
    {
      "epoch": 0.8188536049530657,
      "grad_norm": 1.1749235391616821,
      "learning_rate": 7.183636363636364e-06,
      "loss": 0.0265,
      "step": 2050
    },
    {
      "epoch": 0.8228480127821051,
      "grad_norm": 3.216322422027588,
      "learning_rate": 7.165454545454547e-06,
      "loss": 0.0297,
      "step": 2060
    },
    {
      "epoch": 0.8268424206111444,
      "grad_norm": 2.813265562057495,
      "learning_rate": 7.1472727272727285e-06,
      "loss": 0.0284,
      "step": 2070
    },
    {
      "epoch": 0.8308368284401837,
      "grad_norm": 2.927987575531006,
      "learning_rate": 7.12909090909091e-06,
      "loss": 0.0397,
      "step": 2080
    },
    {
      "epoch": 0.8348312362692231,
      "grad_norm": 3.2884976863861084,
      "learning_rate": 7.110909090909091e-06,
      "loss": 0.0319,
      "step": 2090
    },
    {
      "epoch": 0.8388256440982624,
      "grad_norm": 2.057528495788574,
      "learning_rate": 7.092727272727273e-06,
      "loss": 0.0305,
      "step": 2100
    },
    {
      "epoch": 0.8428200519273018,
      "grad_norm": 2.372114658355713,
      "learning_rate": 7.074545454545455e-06,
      "loss": 0.03,
      "step": 2110
    },
    {
      "epoch": 0.8468144597563412,
      "grad_norm": 2.805551052093506,
      "learning_rate": 7.056363636363637e-06,
      "loss": 0.0305,
      "step": 2120
    },
    {
      "epoch": 0.8508088675853804,
      "grad_norm": 2.1154088973999023,
      "learning_rate": 7.038181818181819e-06,
      "loss": 0.026,
      "step": 2130
    },
    {
      "epoch": 0.8548032754144198,
      "grad_norm": 3.1023805141448975,
      "learning_rate": 7.0200000000000006e-06,
      "loss": 0.033,
      "step": 2140
    },
    {
      "epoch": 0.8587976832434592,
      "grad_norm": 2.8123090267181396,
      "learning_rate": 7.0018181818181815e-06,
      "loss": 0.0273,
      "step": 2150
    },
    {
      "epoch": 0.8627920910724985,
      "grad_norm": 2.0984227657318115,
      "learning_rate": 6.983636363636365e-06,
      "loss": 0.0279,
      "step": 2160
    },
    {
      "epoch": 0.8667864989015378,
      "grad_norm": 4.393746376037598,
      "learning_rate": 6.965454545454546e-06,
      "loss": 0.0318,
      "step": 2170
    },
    {
      "epoch": 0.8707809067305772,
      "grad_norm": 3.9992425441741943,
      "learning_rate": 6.947272727272728e-06,
      "loss": 0.0353,
      "step": 2180
    },
    {
      "epoch": 0.8747753145596165,
      "grad_norm": 1.945391297340393,
      "learning_rate": 6.92909090909091e-06,
      "loss": 0.0228,
      "step": 2190
    },
    {
      "epoch": 0.8787697223886559,
      "grad_norm": 3.7365753650665283,
      "learning_rate": 6.910909090909092e-06,
      "loss": 0.0299,
      "step": 2200
    },
    {
      "epoch": 0.8827641302176952,
      "grad_norm": 2.2218515872955322,
      "learning_rate": 6.892727272727273e-06,
      "loss": 0.025,
      "step": 2210
    },
    {
      "epoch": 0.8867585380467345,
      "grad_norm": 1.6459863185882568,
      "learning_rate": 6.874545454545455e-06,
      "loss": 0.0272,
      "step": 2220
    },
    {
      "epoch": 0.8907529458757739,
      "grad_norm": 1.9987223148345947,
      "learning_rate": 6.856363636363636e-06,
      "loss": 0.0316,
      "step": 2230
    },
    {
      "epoch": 0.8947473537048133,
      "grad_norm": 1.8636083602905273,
      "learning_rate": 6.838181818181818e-06,
      "loss": 0.0296,
      "step": 2240
    },
    {
      "epoch": 0.8987417615338527,
      "grad_norm": 4.068820476531982,
      "learning_rate": 6.820000000000001e-06,
      "loss": 0.0377,
      "step": 2250
    },
    {
      "epoch": 0.9027361693628919,
      "grad_norm": 2.758310079574585,
      "learning_rate": 6.801818181818183e-06,
      "loss": 0.0255,
      "step": 2260
    },
    {
      "epoch": 0.9067305771919313,
      "grad_norm": 2.86716890335083,
      "learning_rate": 6.7836363636363644e-06,
      "loss": 0.0302,
      "step": 2270
    },
    {
      "epoch": 0.9107249850209707,
      "grad_norm": 2.4357964992523193,
      "learning_rate": 6.765454545454546e-06,
      "loss": 0.0302,
      "step": 2280
    },
    {
      "epoch": 0.91471939285001,
      "grad_norm": 2.8732433319091797,
      "learning_rate": 6.747272727272728e-06,
      "loss": 0.0289,
      "step": 2290
    },
    {
      "epoch": 0.9187138006790493,
      "grad_norm": 4.2874836921691895,
      "learning_rate": 6.72909090909091e-06,
      "loss": 0.0303,
      "step": 2300
    },
    {
      "epoch": 0.9227082085080887,
      "grad_norm": 2.6381747722625732,
      "learning_rate": 6.710909090909091e-06,
      "loss": 0.029,
      "step": 2310
    },
    {
      "epoch": 0.926702616337128,
      "grad_norm": 2.819223403930664,
      "learning_rate": 6.692727272727273e-06,
      "loss": 0.0261,
      "step": 2320
    },
    {
      "epoch": 0.9306970241661674,
      "grad_norm": 2.7608511447906494,
      "learning_rate": 6.674545454545455e-06,
      "loss": 0.025,
      "step": 2330
    },
    {
      "epoch": 0.9346914319952067,
      "grad_norm": 2.7692387104034424,
      "learning_rate": 6.6563636363636365e-06,
      "loss": 0.0247,
      "step": 2340
    },
    {
      "epoch": 0.938685839824246,
      "grad_norm": 3.5204415321350098,
      "learning_rate": 6.638181818181819e-06,
      "loss": 0.0377,
      "step": 2350
    },
    {
      "epoch": 0.9426802476532854,
      "grad_norm": 3.3943400382995605,
      "learning_rate": 6.620000000000001e-06,
      "loss": 0.0315,
      "step": 2360
    },
    {
      "epoch": 0.9466746554823248,
      "grad_norm": 3.179027795791626,
      "learning_rate": 6.601818181818183e-06,
      "loss": 0.0269,
      "step": 2370
    },
    {
      "epoch": 0.9506690633113641,
      "grad_norm": 1.89017653465271,
      "learning_rate": 6.583636363636365e-06,
      "loss": 0.0294,
      "step": 2380
    },
    {
      "epoch": 0.9546634711404034,
      "grad_norm": 3.284336566925049,
      "learning_rate": 6.565454545454546e-06,
      "loss": 0.0395,
      "step": 2390
    },
    {
      "epoch": 0.9586578789694428,
      "grad_norm": 2.3796751499176025,
      "learning_rate": 6.5472727272727275e-06,
      "loss": 0.0337,
      "step": 2400
    },
    {
      "epoch": 0.9626522867984821,
      "grad_norm": 2.710698366165161,
      "learning_rate": 6.529090909090909e-06,
      "loss": 0.0276,
      "step": 2410
    },
    {
      "epoch": 0.9666466946275215,
      "grad_norm": Infinity,
      "learning_rate": 6.510909090909091e-06,
      "loss": 0.0459,
      "step": 2420
    },
    {
      "epoch": 0.9706411024565608,
      "grad_norm": 3.24204421043396,
      "learning_rate": 6.492727272727273e-06,
      "loss": 0.0345,
      "step": 2430
    },
    {
      "epoch": 0.9746355102856001,
      "grad_norm": 4.349393844604492,
      "learning_rate": 6.474545454545456e-06,
      "loss": 0.0355,
      "step": 2440
    },
    {
      "epoch": 0.9786299181146395,
      "grad_norm": 2.544041395187378,
      "learning_rate": 6.4563636363636375e-06,
      "loss": 0.0326,
      "step": 2450
    },
    {
      "epoch": 0.9826243259436789,
      "grad_norm": 1.9386078119277954,
      "learning_rate": 6.438181818181819e-06,
      "loss": 0.028,
      "step": 2460
    },
    {
      "epoch": 0.9866187337727182,
      "grad_norm": 2.767906904220581,
      "learning_rate": 6.42e-06,
      "loss": 0.0244,
      "step": 2470
    },
    {
      "epoch": 0.9906131416017575,
      "grad_norm": 3.3528952598571777,
      "learning_rate": 6.401818181818182e-06,
      "loss": 0.03,
      "step": 2480
    },
    {
      "epoch": 0.9946075494307969,
      "grad_norm": 2.875234365463257,
      "learning_rate": 6.383636363636364e-06,
      "loss": 0.0323,
      "step": 2490
    },
    {
      "epoch": 0.9986019572598362,
      "grad_norm": 3.0124990940093994,
      "learning_rate": 6.365454545454546e-06,
      "loss": 0.0373,
      "step": 2500
    },
    {
      "epoch": 1.0023966446974235,
      "grad_norm": 1.3243356943130493,
      "learning_rate": 6.347272727272728e-06,
      "loss": 0.0143,
      "step": 2510
    },
    {
      "epoch": 1.006391052526463,
      "grad_norm": 1.5087229013442993,
      "learning_rate": 6.3290909090909096e-06,
      "loss": 0.0078,
      "step": 2520
    },
    {
      "epoch": 1.0103854603555023,
      "grad_norm": 2.5617003440856934,
      "learning_rate": 6.3109090909090905e-06,
      "loss": 0.0155,
      "step": 2530
    },
    {
      "epoch": 1.0143798681845417,
      "grad_norm": 0.9475475549697876,
      "learning_rate": 6.292727272727274e-06,
      "loss": 0.0091,
      "step": 2540
    },
    {
      "epoch": 1.018374276013581,
      "grad_norm": 1.712641716003418,
      "learning_rate": 6.274545454545455e-06,
      "loss": 0.013,
      "step": 2550
    },
    {
      "epoch": 1.0223686838426203,
      "grad_norm": 0.9213377237319946,
      "learning_rate": 6.256363636363637e-06,
      "loss": 0.009,
      "step": 2560
    },
    {
      "epoch": 1.0263630916716597,
      "grad_norm": 1.4909203052520752,
      "learning_rate": 6.238181818181819e-06,
      "loss": 0.0093,
      "step": 2570
    },
    {
      "epoch": 1.030357499500699,
      "grad_norm": 0.989854633808136,
      "learning_rate": 6.220000000000001e-06,
      "loss": 0.0068,
      "step": 2580
    },
    {
      "epoch": 1.0343519073297385,
      "grad_norm": 1.8446928262710571,
      "learning_rate": 6.2018181818181824e-06,
      "loss": 0.0125,
      "step": 2590
    },
    {
      "epoch": 1.0383463151587777,
      "grad_norm": 0.5228179693222046,
      "learning_rate": 6.183636363636364e-06,
      "loss": 0.0074,
      "step": 2600
    },
    {
      "epoch": 1.042340722987817,
      "grad_norm": 1.9997162818908691,
      "learning_rate": 6.165454545454545e-06,
      "loss": 0.0078,
      "step": 2610
    },
    {
      "epoch": 1.0463351308168565,
      "grad_norm": 0.7548797130584717,
      "learning_rate": 6.147272727272727e-06,
      "loss": 0.0132,
      "step": 2620
    },
    {
      "epoch": 1.0503295386458957,
      "grad_norm": 1.5827348232269287,
      "learning_rate": 6.12909090909091e-06,
      "loss": 0.0067,
      "step": 2630
    },
    {
      "epoch": 1.054323946474935,
      "grad_norm": 1.7578275203704834,
      "learning_rate": 6.110909090909092e-06,
      "loss": 0.012,
      "step": 2640
    },
    {
      "epoch": 1.0583183543039745,
      "grad_norm": 1.5093387365341187,
      "learning_rate": 6.0927272727272735e-06,
      "loss": 0.008,
      "step": 2650
    },
    {
      "epoch": 1.0623127621330137,
      "grad_norm": 2.2290687561035156,
      "learning_rate": 6.074545454545455e-06,
      "loss": 0.0055,
      "step": 2660
    },
    {
      "epoch": 1.0663071699620532,
      "grad_norm": 2.303945779800415,
      "learning_rate": 6.056363636363637e-06,
      "loss": 0.0099,
      "step": 2670
    },
    {
      "epoch": 1.0703015777910925,
      "grad_norm": 0.6369355320930481,
      "learning_rate": 6.038181818181819e-06,
      "loss": 0.0063,
      "step": 2680
    },
    {
      "epoch": 1.0742959856201317,
      "grad_norm": 0.5402636528015137,
      "learning_rate": 6.02e-06,
      "loss": 0.0113,
      "step": 2690
    },
    {
      "epoch": 1.0782903934491712,
      "grad_norm": 1.1112077236175537,
      "learning_rate": 6.001818181818182e-06,
      "loss": 0.0121,
      "step": 2700
    },
    {
      "epoch": 1.0822848012782105,
      "grad_norm": 1.1533619165420532,
      "learning_rate": 5.983636363636364e-06,
      "loss": 0.0093,
      "step": 2710
    },
    {
      "epoch": 1.08627920910725,
      "grad_norm": 0.7646775245666504,
      "learning_rate": 5.965454545454546e-06,
      "loss": 0.0115,
      "step": 2720
    },
    {
      "epoch": 1.0902736169362892,
      "grad_norm": 0.8578062057495117,
      "learning_rate": 5.947272727272728e-06,
      "loss": 0.0086,
      "step": 2730
    },
    {
      "epoch": 1.0942680247653285,
      "grad_norm": 3.2737674713134766,
      "learning_rate": 5.92909090909091e-06,
      "loss": 0.0073,
      "step": 2740
    },
    {
      "epoch": 1.098262432594368,
      "grad_norm": 2.4335756301879883,
      "learning_rate": 5.910909090909092e-06,
      "loss": 0.0101,
      "step": 2750
    },
    {
      "epoch": 1.1022568404234072,
      "grad_norm": 1.416908860206604,
      "learning_rate": 5.892727272727274e-06,
      "loss": 0.0072,
      "step": 2760
    },
    {
      "epoch": 1.1062512482524465,
      "grad_norm": 0.8457365036010742,
      "learning_rate": 5.874545454545455e-06,
      "loss": 0.0088,
      "step": 2770
    },
    {
      "epoch": 1.110245656081486,
      "grad_norm": 1.1250872611999512,
      "learning_rate": 5.8563636363636365e-06,
      "loss": 0.0064,
      "step": 2780
    },
    {
      "epoch": 1.1142400639105252,
      "grad_norm": 1.291493535041809,
      "learning_rate": 5.838181818181818e-06,
      "loss": 0.0099,
      "step": 2790
    },
    {
      "epoch": 1.1182344717395647,
      "grad_norm": 0.5688849091529846,
      "learning_rate": 5.82e-06,
      "loss": 0.0085,
      "step": 2800
    },
    {
      "epoch": 1.122228879568604,
      "grad_norm": 1.1171317100524902,
      "learning_rate": 5.801818181818182e-06,
      "loss": 0.0067,
      "step": 2810
    },
    {
      "epoch": 1.1262232873976432,
      "grad_norm": 2.204411506652832,
      "learning_rate": 5.783636363636365e-06,
      "loss": 0.0097,
      "step": 2820
    },
    {
      "epoch": 1.1302176952266827,
      "grad_norm": 0.7467852830886841,
      "learning_rate": 5.7654545454545465e-06,
      "loss": 0.0069,
      "step": 2830
    },
    {
      "epoch": 1.134212103055722,
      "grad_norm": 1.518353819847107,
      "learning_rate": 5.747272727272728e-06,
      "loss": 0.0103,
      "step": 2840
    },
    {
      "epoch": 1.1382065108847614,
      "grad_norm": 3.4078903198242188,
      "learning_rate": 5.729090909090909e-06,
      "loss": 0.0136,
      "step": 2850
    },
    {
      "epoch": 1.1422009187138007,
      "grad_norm": 0.7704038619995117,
      "learning_rate": 5.710909090909091e-06,
      "loss": 0.006,
      "step": 2860
    },
    {
      "epoch": 1.14619532654284,
      "grad_norm": 0.5476235151290894,
      "learning_rate": 5.692727272727273e-06,
      "loss": 0.0069,
      "step": 2870
    },
    {
      "epoch": 1.1501897343718794,
      "grad_norm": 0.3564358353614807,
      "learning_rate": 5.674545454545455e-06,
      "loss": 0.0122,
      "step": 2880
    },
    {
      "epoch": 1.1541841422009187,
      "grad_norm": 1.082381010055542,
      "learning_rate": 5.656363636363637e-06,
      "loss": 0.0078,
      "step": 2890
    },
    {
      "epoch": 1.158178550029958,
      "grad_norm": 1.2779443264007568,
      "learning_rate": 5.6381818181818186e-06,
      "loss": 0.0101,
      "step": 2900
    },
    {
      "epoch": 1.1621729578589974,
      "grad_norm": 1.3106609582901,
      "learning_rate": 5.620000000000001e-06,
      "loss": 0.0099,
      "step": 2910
    },
    {
      "epoch": 1.1661673656880367,
      "grad_norm": 1.0210970640182495,
      "learning_rate": 5.601818181818183e-06,
      "loss": 0.0081,
      "step": 2920
    },
    {
      "epoch": 1.1701617735170762,
      "grad_norm": 2.3123538494110107,
      "learning_rate": 5.583636363636364e-06,
      "loss": 0.0072,
      "step": 2930
    },
    {
      "epoch": 1.1741561813461154,
      "grad_norm": 1.2326884269714355,
      "learning_rate": 5.565454545454546e-06,
      "loss": 0.0072,
      "step": 2940
    },
    {
      "epoch": 1.1781505891751547,
      "grad_norm": 1.6470218896865845,
      "learning_rate": 5.547272727272728e-06,
      "loss": 0.0117,
      "step": 2950
    },
    {
      "epoch": 1.1821449970041942,
      "grad_norm": 1.5907762050628662,
      "learning_rate": 5.52909090909091e-06,
      "loss": 0.0096,
      "step": 2960
    },
    {
      "epoch": 1.1861394048332334,
      "grad_norm": 1.7873153686523438,
      "learning_rate": 5.5109090909090914e-06,
      "loss": 0.0076,
      "step": 2970
    },
    {
      "epoch": 1.190133812662273,
      "grad_norm": 0.8777304291725159,
      "learning_rate": 5.492727272727273e-06,
      "loss": 0.0094,
      "step": 2980
    },
    {
      "epoch": 1.1941282204913122,
      "grad_norm": 1.556298851966858,
      "learning_rate": 5.474545454545454e-06,
      "loss": 0.0098,
      "step": 2990
    },
    {
      "epoch": 1.1981226283203514,
      "grad_norm": 2.023460865020752,
      "learning_rate": 5.456363636363636e-06,
      "loss": 0.0103,
      "step": 3000
    },
    {
      "epoch": 1.202117036149391,
      "grad_norm": 0.8088117837905884,
      "learning_rate": 5.438181818181819e-06,
      "loss": 0.0091,
      "step": 3010
    },
    {
      "epoch": 1.2061114439784302,
      "grad_norm": 1.834544062614441,
      "learning_rate": 5.420000000000001e-06,
      "loss": 0.0071,
      "step": 3020
    },
    {
      "epoch": 1.2101058518074694,
      "grad_norm": 1.0826184749603271,
      "learning_rate": 5.4018181818181825e-06,
      "loss": 0.007,
      "step": 3030
    },
    {
      "epoch": 1.214100259636509,
      "grad_norm": 1.198883056640625,
      "learning_rate": 5.383636363636364e-06,
      "loss": 0.0074,
      "step": 3040
    },
    {
      "epoch": 1.2180946674655482,
      "grad_norm": 1.476663589477539,
      "learning_rate": 5.365454545454546e-06,
      "loss": 0.0084,
      "step": 3050
    },
    {
      "epoch": 1.2220890752945877,
      "grad_norm": 2.082937717437744,
      "learning_rate": 5.347272727272728e-06,
      "loss": 0.0081,
      "step": 3060
    },
    {
      "epoch": 1.226083483123627,
      "grad_norm": 1.7027249336242676,
      "learning_rate": 5.329090909090909e-06,
      "loss": 0.0055,
      "step": 3070
    },
    {
      "epoch": 1.2300778909526662,
      "grad_norm": 1.8248317241668701,
      "learning_rate": 5.310909090909091e-06,
      "loss": 0.0106,
      "step": 3080
    },
    {
      "epoch": 1.2340722987817057,
      "grad_norm": 0.6236146092414856,
      "learning_rate": 5.292727272727273e-06,
      "loss": 0.0064,
      "step": 3090
    },
    {
      "epoch": 1.238066706610745,
      "grad_norm": 1.895790934562683,
      "learning_rate": 5.274545454545455e-06,
      "loss": 0.0062,
      "step": 3100
    },
    {
      "epoch": 1.2420611144397844,
      "grad_norm": 2.1608922481536865,
      "learning_rate": 5.256363636363637e-06,
      "loss": 0.0077,
      "step": 3110
    },
    {
      "epoch": 1.2460555222688237,
      "grad_norm": 0.884495735168457,
      "learning_rate": 5.238181818181819e-06,
      "loss": 0.005,
      "step": 3120
    },
    {
      "epoch": 1.250049930097863,
      "grad_norm": 1.3650517463684082,
      "learning_rate": 5.220000000000001e-06,
      "loss": 0.0071,
      "step": 3130
    },
    {
      "epoch": 1.2540443379269024,
      "grad_norm": 1.8378918170928955,
      "learning_rate": 5.201818181818183e-06,
      "loss": 0.0078,
      "step": 3140
    },
    {
      "epoch": 1.2580387457559417,
      "grad_norm": 0.3236289322376251,
      "learning_rate": 5.183636363636364e-06,
      "loss": 0.0106,
      "step": 3150
    },
    {
      "epoch": 1.262033153584981,
      "grad_norm": 1.896931529045105,
      "learning_rate": 5.1654545454545455e-06,
      "loss": 0.0103,
      "step": 3160
    },
    {
      "epoch": 1.2660275614140204,
      "grad_norm": 0.38708317279815674,
      "learning_rate": 5.147272727272727e-06,
      "loss": 0.0101,
      "step": 3170
    },
    {
      "epoch": 1.2700219692430597,
      "grad_norm": 0.8924418687820435,
      "learning_rate": 5.129090909090909e-06,
      "loss": 0.0084,
      "step": 3180
    },
    {
      "epoch": 1.2740163770720991,
      "grad_norm": 1.9992389678955078,
      "learning_rate": 5.110909090909091e-06,
      "loss": 0.008,
      "step": 3190
    },
    {
      "epoch": 1.2780107849011384,
      "grad_norm": 6.68002462387085,
      "learning_rate": 5.092727272727274e-06,
      "loss": 0.0079,
      "step": 3200
    },
    {
      "epoch": 1.2820051927301779,
      "grad_norm": 0.4097543954849243,
      "learning_rate": 5.0745454545454555e-06,
      "loss": 0.007,
      "step": 3210
    },
    {
      "epoch": 1.2859996005592171,
      "grad_norm": 2.126607656478882,
      "learning_rate": 5.056363636363637e-06,
      "loss": 0.0085,
      "step": 3220
    },
    {
      "epoch": 1.2899940083882564,
      "grad_norm": 0.7770309448242188,
      "learning_rate": 5.038181818181818e-06,
      "loss": 0.0092,
      "step": 3230
    },
    {
      "epoch": 1.2939884162172959,
      "grad_norm": 1.09266996383667,
      "learning_rate": 5.02e-06,
      "loss": 0.0154,
      "step": 3240
    },
    {
      "epoch": 1.2979828240463351,
      "grad_norm": 0.644081175327301,
      "learning_rate": 5.001818181818182e-06,
      "loss": 0.0109,
      "step": 3250
    },
    {
      "epoch": 1.3019772318753744,
      "grad_norm": 0.40339845418930054,
      "learning_rate": 4.983636363636364e-06,
      "loss": 0.0136,
      "step": 3260
    },
    {
      "epoch": 1.3059716397044139,
      "grad_norm": 1.0117156505584717,
      "learning_rate": 4.965454545454546e-06,
      "loss": 0.0079,
      "step": 3270
    },
    {
      "epoch": 1.3099660475334531,
      "grad_norm": 0.6380696296691895,
      "learning_rate": 4.9472727272727276e-06,
      "loss": 0.0055,
      "step": 3280
    },
    {
      "epoch": 1.3139604553624924,
      "grad_norm": 2.3137929439544678,
      "learning_rate": 4.929090909090909e-06,
      "loss": 0.0081,
      "step": 3290
    },
    {
      "epoch": 1.3179548631915319,
      "grad_norm": 1.307806372642517,
      "learning_rate": 4.910909090909091e-06,
      "loss": 0.0107,
      "step": 3300
    },
    {
      "epoch": 1.3219492710205711,
      "grad_norm": 1.1640592813491821,
      "learning_rate": 4.892727272727273e-06,
      "loss": 0.0071,
      "step": 3310
    },
    {
      "epoch": 1.3259436788496106,
      "grad_norm": 0.8527657389640808,
      "learning_rate": 4.874545454545455e-06,
      "loss": 0.0075,
      "step": 3320
    },
    {
      "epoch": 1.3299380866786499,
      "grad_norm": 1.5638917684555054,
      "learning_rate": 4.856363636363637e-06,
      "loss": 0.0077,
      "step": 3330
    },
    {
      "epoch": 1.3339324945076894,
      "grad_norm": 3.4176924228668213,
      "learning_rate": 4.838181818181819e-06,
      "loss": 0.0134,
      "step": 3340
    },
    {
      "epoch": 1.3379269023367286,
      "grad_norm": 1.353973388671875,
      "learning_rate": 4.8200000000000004e-06,
      "loss": 0.0063,
      "step": 3350
    },
    {
      "epoch": 1.3419213101657679,
      "grad_norm": 1.280344843864441,
      "learning_rate": 4.801818181818182e-06,
      "loss": 0.0131,
      "step": 3360
    },
    {
      "epoch": 1.3459157179948074,
      "grad_norm": 1.0167405605316162,
      "learning_rate": 4.783636363636364e-06,
      "loss": 0.0091,
      "step": 3370
    },
    {
      "epoch": 1.3499101258238466,
      "grad_norm": 0.347282350063324,
      "learning_rate": 4.765454545454546e-06,
      "loss": 0.0066,
      "step": 3380
    },
    {
      "epoch": 1.3539045336528859,
      "grad_norm": 0.9135898351669312,
      "learning_rate": 4.747272727272728e-06,
      "loss": 0.0093,
      "step": 3390
    },
    {
      "epoch": 1.3578989414819254,
      "grad_norm": 0.4328613877296448,
      "learning_rate": 4.72909090909091e-06,
      "loss": 0.0089,
      "step": 3400
| }, | |
| { | |
| "epoch": 1.3618933493109646, | |
| "grad_norm": 0.7179979681968689, | |
| "learning_rate": 4.7109090909090915e-06, | |
| "loss": 0.0078, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.3658877571400039, | |
| "grad_norm": 1.748172402381897, | |
| "learning_rate": 4.692727272727273e-06, | |
| "loss": 0.0062, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.3698821649690434, | |
| "grad_norm": 0.30844706296920776, | |
| "learning_rate": 4.674545454545455e-06, | |
| "loss": 0.0041, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.3738765727980826, | |
| "grad_norm": 1.167373538017273, | |
| "learning_rate": 4.656363636363637e-06, | |
| "loss": 0.008, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.377870980627122, | |
| "grad_norm": 1.5947142839431763, | |
| "learning_rate": 4.638181818181818e-06, | |
| "loss": 0.0067, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.3818653884561614, | |
| "grad_norm": 1.5891377925872803, | |
| "learning_rate": 4.620000000000001e-06, | |
| "loss": 0.007, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.3858597962852008, | |
| "grad_norm": 2.0473077297210693, | |
| "learning_rate": 4.6018181818181825e-06, | |
| "loss": 0.0096, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.38985420411424, | |
| "grad_norm": 2.16921067237854, | |
| "learning_rate": 4.583636363636364e-06, | |
| "loss": 0.0084, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.3938486119432794, | |
| "grad_norm": 2.0252652168273926, | |
| "learning_rate": 4.565454545454545e-06, | |
| "loss": 0.0081, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.3978430197723188, | |
| "grad_norm": 1.2650831937789917, | |
| "learning_rate": 4.547272727272727e-06, | |
| "loss": 0.0065, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.401837427601358, | |
| "grad_norm": 1.4343613386154175, | |
| "learning_rate": 4.52909090909091e-06, | |
| "loss": 0.0067, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.4058318354303974, | |
| "grad_norm": 0.9264397025108337, | |
| "learning_rate": 4.510909090909092e-06, | |
| "loss": 0.0084, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.4098262432594368, | |
| "grad_norm": 0.47705429792404175, | |
| "learning_rate": 4.492727272727273e-06, | |
| "loss": 0.0147, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.413820651088476, | |
| "grad_norm": 0.33390942215919495, | |
| "learning_rate": 4.4745454545454545e-06, | |
| "loss": 0.0067, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.4178150589175154, | |
| "grad_norm": 0.7790824174880981, | |
| "learning_rate": 4.456363636363637e-06, | |
| "loss": 0.0073, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.4218094667465548, | |
| "grad_norm": 1.8825984001159668, | |
| "learning_rate": 4.438181818181819e-06, | |
| "loss": 0.0119, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.425803874575594, | |
| "grad_norm": 2.6809818744659424, | |
| "learning_rate": 4.42e-06, | |
| "loss": 0.0076, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.4297982824046336, | |
| "grad_norm": 1.4570934772491455, | |
| "learning_rate": 4.401818181818182e-06, | |
| "loss": 0.0073, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.4337926902336728, | |
| "grad_norm": 0.7681152820587158, | |
| "learning_rate": 4.383636363636364e-06, | |
| "loss": 0.0044, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.4377870980627123, | |
| "grad_norm": 0.2887667715549469, | |
| "learning_rate": 4.365454545454546e-06, | |
| "loss": 0.0058, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.4417815058917516, | |
| "grad_norm": 1.9216909408569336, | |
| "learning_rate": 4.347272727272727e-06, | |
| "loss": 0.0079, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.4457759137207908, | |
| "grad_norm": 1.959386944770813, | |
| "learning_rate": 4.329090909090909e-06, | |
| "loss": 0.007, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.4497703215498303, | |
| "grad_norm": 1.421069860458374, | |
| "learning_rate": 4.310909090909091e-06, | |
| "loss": 0.0064, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.4537647293788696, | |
| "grad_norm": 1.9688565731048584, | |
| "learning_rate": 4.292727272727273e-06, | |
| "loss": 0.0065, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.4577591372079088, | |
| "grad_norm": 0.46297717094421387, | |
| "learning_rate": 4.274545454545455e-06, | |
| "loss": 0.0045, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.4617535450369483, | |
| "grad_norm": 0.6084499359130859, | |
| "learning_rate": 4.256363636363637e-06, | |
| "loss": 0.0063, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.4657479528659876, | |
| "grad_norm": 2.4733595848083496, | |
| "learning_rate": 4.238181818181818e-06, | |
| "loss": 0.0082, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.4697423606950268, | |
| "grad_norm": 0.8938449025154114, | |
| "learning_rate": 4.22e-06, | |
| "loss": 0.0056, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.4737367685240663, | |
| "grad_norm": 1.3058514595031738, | |
| "learning_rate": 4.201818181818182e-06, | |
| "loss": 0.0079, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.4777311763531056, | |
| "grad_norm": 0.42573487758636475, | |
| "learning_rate": 4.183636363636364e-06, | |
| "loss": 0.0083, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.481725584182145, | |
| "grad_norm": 2.336344003677368, | |
| "learning_rate": 4.165454545454546e-06, | |
| "loss": 0.0105, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.4857199920111843, | |
| "grad_norm": 0.4930185079574585, | |
| "learning_rate": 4.147272727272728e-06, | |
| "loss": 0.0083, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.4897143998402238, | |
| "grad_norm": 1.1534149646759033, | |
| "learning_rate": 4.1290909090909094e-06, | |
| "loss": 0.0057, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.493708807669263, | |
| "grad_norm": 1.0539729595184326, | |
| "learning_rate": 4.110909090909091e-06, | |
| "loss": 0.0075, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.4977032154983023, | |
| "grad_norm": 1.9535585641860962, | |
| "learning_rate": 4.092727272727273e-06, | |
| "loss": 0.0081, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.5016976233273418, | |
| "grad_norm": 1.5772063732147217, | |
| "learning_rate": 4.074545454545455e-06, | |
| "loss": 0.0074, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.505692031156381, | |
| "grad_norm": 1.9321742057800293, | |
| "learning_rate": 4.056363636363637e-06, | |
| "loss": 0.0076, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.5096864389854203, | |
| "grad_norm": 1.391487717628479, | |
| "learning_rate": 4.038181818181819e-06, | |
| "loss": 0.0054, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.5136808468144598, | |
| "grad_norm": 0.29940593242645264, | |
| "learning_rate": 4.0200000000000005e-06, | |
| "loss": 0.0075, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.517675254643499, | |
| "grad_norm": 0.7316534519195557, | |
| "learning_rate": 4.001818181818182e-06, | |
| "loss": 0.0059, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.5216696624725383, | |
| "grad_norm": 0.8725746273994446, | |
| "learning_rate": 3.983636363636364e-06, | |
| "loss": 0.0082, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.5256640703015778, | |
| "grad_norm": 1.1659518480300903, | |
| "learning_rate": 3.965454545454546e-06, | |
| "loss": 0.0056, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.5296584781306173, | |
| "grad_norm": 0.9450637698173523, | |
| "learning_rate": 3.947272727272727e-06, | |
| "loss": 0.0078, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.5336528859596563, | |
| "grad_norm": 0.7942542433738708, | |
| "learning_rate": 3.92909090909091e-06, | |
| "loss": 0.0056, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.5376472937886958, | |
| "grad_norm": 2.9248387813568115, | |
| "learning_rate": 3.9109090909090915e-06, | |
| "loss": 0.0065, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.5416417016177353, | |
| "grad_norm": 0.9460102319717407, | |
| "learning_rate": 3.892727272727273e-06, | |
| "loss": 0.0063, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.5456361094467745, | |
| "grad_norm": 1.5365135669708252, | |
| "learning_rate": 3.874545454545454e-06, | |
| "loss": 0.0091, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.5496305172758138, | |
| "grad_norm": 1.6475716829299927, | |
| "learning_rate": 3.856363636363636e-06, | |
| "loss": 0.005, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.5536249251048533, | |
| "grad_norm": 0.8353217244148254, | |
| "learning_rate": 3.838181818181819e-06, | |
| "loss": 0.0043, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.5576193329338925, | |
| "grad_norm": 0.36758750677108765, | |
| "learning_rate": 3.820000000000001e-06, | |
| "loss": 0.004, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.5616137407629318, | |
| "grad_norm": 0.2298198938369751, | |
| "learning_rate": 3.801818181818182e-06, | |
| "loss": 0.0084, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.5656081485919713, | |
| "grad_norm": 1.170965313911438, | |
| "learning_rate": 3.783636363636364e-06, | |
| "loss": 0.0067, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.5696025564210105, | |
| "grad_norm": 0.3110823631286621, | |
| "learning_rate": 3.765454545454546e-06, | |
| "loss": 0.0072, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.5735969642500498, | |
| "grad_norm": 1.0501469373703003, | |
| "learning_rate": 3.7472727272727276e-06, | |
| "loss": 0.0101, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.5775913720790893, | |
| "grad_norm": 1.3563507795333862, | |
| "learning_rate": 3.7290909090909095e-06, | |
| "loss": 0.0063, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.5815857799081288, | |
| "grad_norm": 1.415356159210205, | |
| "learning_rate": 3.7109090909090913e-06, | |
| "loss": 0.0048, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.5855801877371678, | |
| "grad_norm": 1.6954106092453003, | |
| "learning_rate": 3.6927272727272727e-06, | |
| "loss": 0.0083, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.5895745955662073, | |
| "grad_norm": 0.9005348086357117, | |
| "learning_rate": 3.674545454545455e-06, | |
| "loss": 0.0093, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.5935690033952468, | |
| "grad_norm": 0.7900477647781372, | |
| "learning_rate": 3.656363636363637e-06, | |
| "loss": 0.0087, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "grad_norm": 1.1566689014434814, | |
| "learning_rate": 3.6381818181818187e-06, | |
| "loss": 0.0072, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "eval_loss": 0.009391536004841328, | |
| "eval_runtime": 7517.9662, | |
| "eval_samples_per_second": 2.664, | |
| "eval_steps_per_second": 0.333, | |
| "eval_wer": 0.7816670479676657, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.6015578190533253, | |
| "grad_norm": 1.0169645547866821, | |
| "learning_rate": 3.62e-06, | |
| "loss": 0.0093, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.6055522268823648, | |
| "grad_norm": 1.4514238834381104, | |
| "learning_rate": 3.601818181818182e-06, | |
| "loss": 0.0079, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.609546634711404, | |
| "grad_norm": 0.7925217747688293, | |
| "learning_rate": 3.583636363636364e-06, | |
| "loss": 0.0063, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.6135410425404433, | |
| "grad_norm": 1.5382804870605469, | |
| "learning_rate": 3.565454545454546e-06, | |
| "loss": 0.008, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.6175354503694828, | |
| "grad_norm": 1.5981707572937012, | |
| "learning_rate": 3.5472727272727274e-06, | |
| "loss": 0.0052, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.621529858198522, | |
| "grad_norm": 0.7755517363548279, | |
| "learning_rate": 3.5290909090909093e-06, | |
| "loss": 0.0058, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.6255242660275613, | |
| "grad_norm": 0.9957447648048401, | |
| "learning_rate": 3.510909090909091e-06, | |
| "loss": 0.0084, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.6295186738566008, | |
| "grad_norm": 0.39248886704444885, | |
| "learning_rate": 3.4927272727272734e-06, | |
| "loss": 0.0091, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.6335130816856402, | |
| "grad_norm": 0.7498955130577087, | |
| "learning_rate": 3.4745454545454548e-06, | |
| "loss": 0.0045, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.6375074895146795, | |
| "grad_norm": 1.9002658128738403, | |
| "learning_rate": 3.4563636363636366e-06, | |
| "loss": 0.0053, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.6415018973437188, | |
| "grad_norm": 1.1334868669509888, | |
| "learning_rate": 3.4381818181818185e-06, | |
| "loss": 0.0082, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.6454963051727582, | |
| "grad_norm": 0.7285448908805847, | |
| "learning_rate": 3.4200000000000007e-06, | |
| "loss": 0.005, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.6494907130017975, | |
| "grad_norm": 0.21141596138477325, | |
| "learning_rate": 3.401818181818182e-06, | |
| "loss": 0.0066, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.6534851208308368, | |
| "grad_norm": 1.2618212699890137, | |
| "learning_rate": 3.383636363636364e-06, | |
| "loss": 0.0062, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.6574795286598762, | |
| "grad_norm": 0.4843064844608307, | |
| "learning_rate": 3.365454545454546e-06, | |
| "loss": 0.0113, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.6614739364889155, | |
| "grad_norm": 1.6873738765716553, | |
| "learning_rate": 3.3472727272727272e-06, | |
| "loss": 0.0074, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.6654683443179548, | |
| "grad_norm": 1.6466562747955322, | |
| "learning_rate": 3.3290909090909095e-06, | |
| "loss": 0.0069, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.6694627521469942, | |
| "grad_norm": 1.5858378410339355, | |
| "learning_rate": 3.3109090909090913e-06, | |
| "loss": 0.0052, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.6734571599760335, | |
| "grad_norm": 0.9765921235084534, | |
| "learning_rate": 3.292727272727273e-06, | |
| "loss": 0.0077, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.6774515678050728, | |
| "grad_norm": 0.858818769454956, | |
| "learning_rate": 3.2745454545454546e-06, | |
| "loss": 0.0064, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.6814459756341122, | |
| "grad_norm": 2.0127484798431396, | |
| "learning_rate": 3.2563636363636364e-06, | |
| "loss": 0.0051, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.6854403834631517, | |
| "grad_norm": 0.588580846786499, | |
| "learning_rate": 3.2381818181818187e-06, | |
| "loss": 0.0067, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.689434791292191, | |
| "grad_norm": 0.42130523920059204, | |
| "learning_rate": 3.2200000000000005e-06, | |
| "loss": 0.0083, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.6934291991212302, | |
| "grad_norm": 0.5209999680519104, | |
| "learning_rate": 3.201818181818182e-06, | |
| "loss": 0.0068, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.6974236069502697, | |
| "grad_norm": 0.638425886631012, | |
| "learning_rate": 3.1836363636363638e-06, | |
| "loss": 0.0078, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.701418014779309, | |
| "grad_norm": 2.0498859882354736, | |
| "learning_rate": 3.1654545454545456e-06, | |
| "loss": 0.0064, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.7054124226083482, | |
| "grad_norm": 0.8182183504104614, | |
| "learning_rate": 3.147272727272728e-06, | |
| "loss": 0.0059, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.7094068304373877, | |
| "grad_norm": 2.9371860027313232, | |
| "learning_rate": 3.1290909090909093e-06, | |
| "loss": 0.0072, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.713401238266427, | |
| "grad_norm": 1.737328052520752, | |
| "learning_rate": 3.110909090909091e-06, | |
| "loss": 0.0069, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.7173956460954662, | |
| "grad_norm": 0.9384961724281311, | |
| "learning_rate": 3.092727272727273e-06, | |
| "loss": 0.0064, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.7213900539245057, | |
| "grad_norm": 2.61478328704834, | |
| "learning_rate": 3.0745454545454552e-06, | |
| "loss": 0.0066, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.725384461753545, | |
| "grad_norm": 0.6870206594467163, | |
| "learning_rate": 3.0563636363636366e-06, | |
| "loss": 0.0073, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.7293788695825842, | |
| "grad_norm": 1.5444024801254272, | |
| "learning_rate": 3.0381818181818185e-06, | |
| "loss": 0.0071, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.7333732774116237, | |
| "grad_norm": 1.0944797992706299, | |
| "learning_rate": 3.0200000000000003e-06, | |
| "loss": 0.0042, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.7373676852406632, | |
| "grad_norm": 1.1846562623977661, | |
| "learning_rate": 3.0018181818181817e-06, | |
| "loss": 0.008, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.7413620930697025, | |
| "grad_norm": 0.3231483995914459, | |
| "learning_rate": 2.983636363636364e-06, | |
| "loss": 0.0056, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.7453565008987417, | |
| "grad_norm": 1.571178913116455, | |
| "learning_rate": 2.965454545454546e-06, | |
| "loss": 0.0059, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.7493509087277812, | |
| "grad_norm": 1.2889162302017212, | |
| "learning_rate": 2.9472727272727277e-06, | |
| "loss": 0.0076, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.7533453165568205, | |
| "grad_norm": 0.7713314890861511, | |
| "learning_rate": 2.929090909090909e-06, | |
| "loss": 0.0066, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.7573397243858597, | |
| "grad_norm": 2.216398000717163, | |
| "learning_rate": 2.910909090909091e-06, | |
| "loss": 0.0057, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.7613341322148992, | |
| "grad_norm": 2.5534586906433105, | |
| "learning_rate": 2.892727272727273e-06, | |
| "loss": 0.0098, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.7653285400439385, | |
| "grad_norm": 1.9392699003219604, | |
| "learning_rate": 2.874545454545455e-06, | |
| "loss": 0.0064, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.7693229478729777, | |
| "grad_norm": 0.7885441780090332, | |
| "learning_rate": 2.8563636363636364e-06, | |
| "loss": 0.0072, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.7733173557020172, | |
| "grad_norm": 0.9948071241378784, | |
| "learning_rate": 2.8381818181818183e-06, | |
| "loss": 0.0071, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.7773117635310565, | |
| "grad_norm": 0.9726841449737549, | |
| "learning_rate": 2.82e-06, | |
| "loss": 0.0096, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.7813061713600957, | |
| "grad_norm": 1.1720293760299683, | |
| "learning_rate": 2.8018181818181824e-06, | |
| "loss": 0.006, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.7853005791891352, | |
| "grad_norm": 0.894111692905426, | |
| "learning_rate": 2.7836363636363638e-06, | |
| "loss": 0.0079, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.7892949870181747, | |
| "grad_norm": 1.6590758562088013, | |
| "learning_rate": 2.7654545454545456e-06, | |
| "loss": 0.009, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.793289394847214, | |
| "grad_norm": 2.0424230098724365, | |
| "learning_rate": 2.7472727272727275e-06, | |
| "loss": 0.0064, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.7972838026762532, | |
| "grad_norm": 2.1726737022399902, | |
| "learning_rate": 2.7290909090909097e-06, | |
| "loss": 0.0078, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.8012782105052927, | |
| "grad_norm": 0.4029232859611511, | |
| "learning_rate": 2.710909090909091e-06, | |
| "loss": 0.0105, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.805272618334332, | |
| "grad_norm": 1.8009533882141113, | |
| "learning_rate": 2.692727272727273e-06, | |
| "loss": 0.0085, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.8092670261633712, | |
| "grad_norm": 1.1566264629364014, | |
| "learning_rate": 2.674545454545455e-06, | |
| "loss": 0.0039, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.8132614339924107, | |
| "grad_norm": 1.4425307512283325, | |
| "learning_rate": 2.6563636363636362e-06, | |
| "loss": 0.0074, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.81725584182145, | |
| "grad_norm": 0.8721437454223633, | |
| "learning_rate": 2.6381818181818185e-06, | |
| "loss": 0.0064, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.8212502496504892, | |
| "grad_norm": 0.976674497127533, | |
| "learning_rate": 2.6200000000000003e-06, | |
| "loss": 0.0033, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.8252446574795287, | |
| "grad_norm": 0.8307082056999207, | |
| "learning_rate": 2.601818181818182e-06, | |
| "loss": 0.0053, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.8292390653085682, | |
| "grad_norm": 0.25643059611320496, | |
| "learning_rate": 2.5836363636363636e-06, | |
| "loss": 0.0079, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.8332334731376072, | |
| "grad_norm": 2.7886087894439697, | |
| "learning_rate": 2.5654545454545454e-06, | |
| "loss": 0.0088, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.8372278809666467, | |
| "grad_norm": 0.3735600411891937, | |
| "learning_rate": 2.5472727272727277e-06, | |
| "loss": 0.0071, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.8412222887956862, | |
| "grad_norm": 0.8093706369400024, | |
| "learning_rate": 2.5290909090909095e-06, | |
| "loss": 0.0086, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.8452166966247254, | |
| "grad_norm": 3.601065158843994, | |
| "learning_rate": 2.510909090909091e-06, | |
| "loss": 0.0061, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.8492111044537647, | |
| "grad_norm": 0.9476358890533447, | |
| "learning_rate": 2.492727272727273e-06, | |
| "loss": 0.0083, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.8532055122828042, | |
| "grad_norm": 1.1815894842147827, | |
| "learning_rate": 2.4745454545454546e-06, | |
| "loss": 0.0108, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.8571999201118434, | |
| "grad_norm": 0.382880300283432, | |
| "learning_rate": 2.4563636363636364e-06, | |
| "loss": 0.0042, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.8611943279408827, | |
| "grad_norm": 0.46276092529296875, | |
| "learning_rate": 2.4381818181818183e-06, | |
| "loss": 0.0071, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.8651887357699222, | |
| "grad_norm": 0.8990813493728638, | |
| "learning_rate": 2.42e-06, | |
| "loss": 0.0061, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.8691831435989614, | |
| "grad_norm": 0.8797413110733032, | |
| "learning_rate": 2.401818181818182e-06, | |
| "loss": 0.005, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.8731775514280007, | |
| "grad_norm": 2.5706422328948975, | |
| "learning_rate": 2.383636363636364e-06, | |
| "loss": 0.0064, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.8771719592570402, | |
| "grad_norm": 1.353135108947754, | |
| "learning_rate": 2.3654545454545456e-06, | |
| "loss": 0.0042, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.8811663670860796, | |
| "grad_norm": 0.8188295960426331, | |
| "learning_rate": 2.3472727272727275e-06, | |
| "loss": 0.01, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.8851607749151187, | |
| "grad_norm": 1.070620059967041, | |
| "learning_rate": 2.3290909090909093e-06, | |
| "loss": 0.0089, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.8891551827441582, | |
| "grad_norm": 0.45555347204208374, | |
| "learning_rate": 2.310909090909091e-06, | |
| "loss": 0.006, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.8931495905731976, | |
| "grad_norm": 0.24032829701900482, | |
| "learning_rate": 2.292727272727273e-06, | |
| "loss": 0.0048, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.897143998402237, | |
| "grad_norm": 1.4816186428070068, | |
| "learning_rate": 2.274545454545455e-06, | |
| "loss": 0.006, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.9011384062312762, | |
| "grad_norm": 0.911095380783081, | |
| "learning_rate": 2.2563636363636367e-06, | |
| "loss": 0.0055, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.9051328140603156, | |
| "grad_norm": 1.3121198415756226, | |
| "learning_rate": 2.238181818181818e-06, | |
| "loss": 0.0076, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.909127221889355, | |
| "grad_norm": 0.4576851725578308, | |
| "learning_rate": 2.2200000000000003e-06, | |
| "loss": 0.0076, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.9131216297183942, | |
| "grad_norm": 0.4526568055152893, | |
| "learning_rate": 2.2018181818181818e-06, | |
| "loss": 0.0049, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.9171160375474336, | |
| "grad_norm": 2.19838285446167, | |
| "learning_rate": 2.183636363636364e-06, | |
| "loss": 0.007, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.921110445376473, | |
| "grad_norm": 1.7481430768966675, | |
| "learning_rate": 2.1654545454545454e-06, | |
| "loss": 0.0075, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.9251048532055122, | |
| "grad_norm": 1.511225938796997, | |
| "learning_rate": 2.1472727272727277e-06, | |
| "loss": 0.0053, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.9290992610345516, | |
| "grad_norm": 0.44884830713272095, | |
| "learning_rate": 2.129090909090909e-06, | |
| "loss": 0.0054, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.9330936688635911, | |
| "grad_norm": 1.4792394638061523, | |
| "learning_rate": 2.110909090909091e-06, | |
| "loss": 0.0063, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.9370880766926302, | |
| "grad_norm": 1.228148102760315, | |
| "learning_rate": 2.092727272727273e-06, | |
| "loss": 0.0049, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.9410824845216696, | |
| "grad_norm": 1.785239338874817, | |
| "learning_rate": 2.0745454545454546e-06, | |
| "loss": 0.0082, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.9450768923507091, | |
| "grad_norm": 1.5273386240005493, | |
| "learning_rate": 2.0563636363636365e-06, | |
| "loss": 0.0049, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.9490713001797484, | |
| "grad_norm": 1.5013206005096436, | |
| "learning_rate": 2.0381818181818183e-06, | |
| "loss": 0.0064, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.9530657080087876, | |
| "grad_norm": 2.1962521076202393, | |
| "learning_rate": 2.02e-06, | |
| "loss": 0.0046, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.9570601158378271, | |
| "grad_norm": 1.8651431798934937, | |
| "learning_rate": 2.001818181818182e-06, | |
| "loss": 0.0067, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.9610545236668664, | |
| "grad_norm": 0.9547003507614136, | |
| "learning_rate": 1.983636363636364e-06, | |
| "loss": 0.0062, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.9650489314959056, | |
| "grad_norm": 1.2471400499343872, | |
| "learning_rate": 1.9654545454545457e-06, | |
| "loss": 0.005, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.9690433393249451, | |
| "grad_norm": 0.28791195154190063, | |
| "learning_rate": 1.9472727272727275e-06, | |
| "loss": 0.007, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.9730377471539844, | |
| "grad_norm": 1.5757160186767578, | |
| "learning_rate": 1.9290909090909093e-06, | |
| "loss": 0.0069, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.9770321549830236, | |
| "grad_norm": 0.5068025588989258, | |
| "learning_rate": 1.910909090909091e-06, | |
| "loss": 0.0077, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.9810265628120631, | |
| "grad_norm": 0.2998751699924469, | |
| "learning_rate": 1.8927272727272728e-06, | |
| "loss": 0.0045, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.9850209706411026, | |
| "grad_norm": 0.4755394756793976, | |
| "learning_rate": 1.8745454545454546e-06, | |
| "loss": 0.0044, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.9890153784701416, | |
| "grad_norm": 1.0987839698791504, | |
| "learning_rate": 1.8563636363636365e-06, | |
| "loss": 0.0075, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.9930097862991811, | |
| "grad_norm": 1.1204299926757812, | |
| "learning_rate": 1.8381818181818183e-06, | |
| "loss": 0.0066, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.9970041941282206, | |
| "grad_norm": 0.4197748005390167, | |
| "learning_rate": 1.8200000000000002e-06, | |
| "loss": 0.0055, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.000798881565808, | |
| "grad_norm": 0.19804427027702332, | |
| "learning_rate": 1.801818181818182e-06, | |
| "loss": 0.0044, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.004793289394847, | |
| "grad_norm": 0.36684417724609375, | |
| "learning_rate": 1.7836363636363638e-06, | |
| "loss": 0.0027, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.0087876972238865, | |
| "grad_norm": 0.6657086610794067, | |
| "learning_rate": 1.7654545454545455e-06, | |
| "loss": 0.0017, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.012782105052926, | |
| "grad_norm": 0.05520480126142502, | |
| "learning_rate": 1.7472727272727275e-06, | |
| "loss": 0.0016, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.0167765128819655, | |
| "grad_norm": 0.09664568305015564, | |
| "learning_rate": 1.7290909090909091e-06, | |
| "loss": 0.0019, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.0207709207110045, | |
| "grad_norm": 0.37269479036331177, | |
| "learning_rate": 1.7109090909090912e-06, | |
| "loss": 0.0018, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.024765328540044, | |
| "grad_norm": 0.11714975535869598, | |
| "learning_rate": 1.6927272727272728e-06, | |
| "loss": 0.0013, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.0287597363690835, | |
| "grad_norm": 0.24795569479465485, | |
| "learning_rate": 1.6745454545454549e-06, | |
| "loss": 0.0014, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.0327541441981225, | |
| "grad_norm": 0.12845009565353394, | |
| "learning_rate": 1.6563636363636365e-06, | |
| "loss": 0.0026, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.036748552027162, | |
| "grad_norm": 0.2782312035560608, | |
| "learning_rate": 1.6381818181818181e-06, | |
| "loss": 0.0017, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.0407429598562015, | |
| "grad_norm": 0.15887074172496796, | |
| "learning_rate": 1.6200000000000002e-06, | |
| "loss": 0.0014, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.0447373676852405, | |
| "grad_norm": 0.13874128460884094, | |
| "learning_rate": 1.6018181818181818e-06, | |
| "loss": 0.0023, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.04873177551428, | |
| "grad_norm": 0.34651249647140503, | |
| "learning_rate": 1.5836363636363638e-06, | |
| "loss": 0.0036, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.0527261833433195, | |
| "grad_norm": 0.1842481791973114, | |
| "learning_rate": 1.5654545454545455e-06, | |
| "loss": 0.0016, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.0567205911723585, | |
| "grad_norm": 0.16887687146663666, | |
| "learning_rate": 1.5472727272727275e-06, | |
| "loss": 0.0031, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.060714999001398, | |
| "grad_norm": 0.24830514192581177, | |
| "learning_rate": 1.5290909090909091e-06, | |
| "loss": 0.0014, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.0647094068304375, | |
| "grad_norm": 0.08864196389913559, | |
| "learning_rate": 1.510909090909091e-06, | |
| "loss": 0.0014, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.068703814659477, | |
| "grad_norm": 0.2347860485315323, | |
| "learning_rate": 1.4927272727272728e-06, | |
| "loss": 0.0015, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.072698222488516, | |
| "grad_norm": 0.17117437720298767, | |
| "learning_rate": 1.4745454545454547e-06, | |
| "loss": 0.0025, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.0766926303175555, | |
| "grad_norm": 1.6268408298492432, | |
| "learning_rate": 1.4563636363636365e-06, | |
| "loss": 0.004, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.080687038146595, | |
| "grad_norm": 0.3034919202327728, | |
| "learning_rate": 1.4381818181818183e-06, | |
| "loss": 0.0015, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.084681445975634, | |
| "grad_norm": 0.15098127722740173, | |
| "learning_rate": 1.42e-06, | |
| "loss": 0.0041, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.0886758538046735, | |
| "grad_norm": 0.5514845252037048, | |
| "learning_rate": 1.401818181818182e-06, | |
| "loss": 0.001, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.092670261633713, | |
| "grad_norm": 0.9603298306465149, | |
| "learning_rate": 1.3836363636363636e-06, | |
| "loss": 0.0017, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.096664669462752, | |
| "grad_norm": 0.23426704108715057, | |
| "learning_rate": 1.3654545454545457e-06, | |
| "loss": 0.0016, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.1006590772917915, | |
| "grad_norm": 0.1383952647447586, | |
| "learning_rate": 1.3472727272727273e-06, | |
| "loss": 0.0013, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.104653485120831, | |
| "grad_norm": 0.22570370137691498, | |
| "learning_rate": 1.3290909090909094e-06, | |
| "loss": 0.002, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.10864789294987, | |
| "grad_norm": 0.2061236947774887, | |
| "learning_rate": 1.310909090909091e-06, | |
| "loss": 0.001, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.1126423007789095, | |
| "grad_norm": 0.1093958392739296, | |
| "learning_rate": 1.2927272727272728e-06, | |
| "loss": 0.0026, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.116636708607949, | |
| "grad_norm": 0.11976289749145508, | |
| "learning_rate": 1.2745454545454547e-06, | |
| "loss": 0.0033, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.1206311164369884, | |
| "grad_norm": 0.18352824449539185, | |
| "learning_rate": 1.2563636363636365e-06, | |
| "loss": 0.0011, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.1246255242660275, | |
| "grad_norm": 0.11029355227947235, | |
| "learning_rate": 1.2381818181818183e-06, | |
| "loss": 0.0013, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.128619932095067, | |
| "grad_norm": 0.8353503942489624, | |
| "learning_rate": 1.2200000000000002e-06, | |
| "loss": 0.0023, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.1326143399241064, | |
| "grad_norm": 0.30112984776496887, | |
| "learning_rate": 1.201818181818182e-06, | |
| "loss": 0.0012, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.1366087477531455, | |
| "grad_norm": 0.4989093542098999, | |
| "learning_rate": 1.1836363636363639e-06, | |
| "loss": 0.0019, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.140603155582185, | |
| "grad_norm": 0.09659893065690994, | |
| "learning_rate": 1.1654545454545457e-06, | |
| "loss": 0.002, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.1445975634112244, | |
| "grad_norm": 0.10450714826583862, | |
| "learning_rate": 1.1472727272727275e-06, | |
| "loss": 0.0033, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.1485919712402635, | |
| "grad_norm": 0.1421971321105957, | |
| "learning_rate": 1.1290909090909092e-06, | |
| "loss": 0.0011, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.152586379069303, | |
| "grad_norm": 0.13540399074554443, | |
| "learning_rate": 1.110909090909091e-06, | |
| "loss": 0.0015, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.1565807868983424, | |
| "grad_norm": 0.13781139254570007, | |
| "learning_rate": 1.0927272727272728e-06, | |
| "loss": 0.0015, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.1605751947273815, | |
| "grad_norm": 0.12392697483301163, | |
| "learning_rate": 1.0745454545454547e-06, | |
| "loss": 0.0024, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.164569602556421, | |
| "grad_norm": 0.326977014541626, | |
| "learning_rate": 1.0563636363636365e-06, | |
| "loss": 0.0023, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.1685640103854604, | |
| "grad_norm": 0.1042676791548729, | |
| "learning_rate": 1.0381818181818183e-06, | |
| "loss": 0.0018, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.1725584182145, | |
| "grad_norm": 0.16718903183937073, | |
| "learning_rate": 1.02e-06, | |
| "loss": 0.0013, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.176552826043539, | |
| "grad_norm": 1.135487675666809, | |
| "learning_rate": 1.0018181818181818e-06, | |
| "loss": 0.0022, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.1805472338725784, | |
| "grad_norm": 0.1465596705675125, | |
| "learning_rate": 9.836363636363637e-07, | |
| "loss": 0.002, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.184541641701618, | |
| "grad_norm": 0.18476231396198273, | |
| "learning_rate": 9.654545454545455e-07, | |
| "loss": 0.0016, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.188536049530657, | |
| "grad_norm": 0.1280352771282196, | |
| "learning_rate": 9.472727272727273e-07, | |
| "loss": 0.0018, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.1925304573596964, | |
| "grad_norm": 0.3329474627971649, | |
| "learning_rate": 9.290909090909092e-07, | |
| "loss": 0.0018, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.196524865188736, | |
| "grad_norm": 0.4752912223339081, | |
| "learning_rate": 9.10909090909091e-07, | |
| "loss": 0.0015, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.200519273017775, | |
| "grad_norm": 0.10243528336286545, | |
| "learning_rate": 8.927272727272727e-07, | |
| "loss": 0.002, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.2045136808468144, | |
| "grad_norm": 0.09885286539793015, | |
| "learning_rate": 8.745454545454546e-07, | |
| "loss": 0.0021, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.208508088675854, | |
| "grad_norm": 0.08831353485584259, | |
| "learning_rate": 8.563636363636364e-07, | |
| "loss": 0.0013, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 2.212502496504893, | |
| "grad_norm": 0.32018667459487915, | |
| "learning_rate": 8.381818181818183e-07, | |
| "loss": 0.002, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 2.2164969043339324, | |
| "grad_norm": 0.17237108945846558, | |
| "learning_rate": 8.200000000000001e-07, | |
| "loss": 0.0018, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 2.220491312162972, | |
| "grad_norm": 0.2744125723838806, | |
| "learning_rate": 8.018181818181819e-07, | |
| "loss": 0.001, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 2.2244857199920114, | |
| "grad_norm": 0.134715735912323, | |
| "learning_rate": 7.836363636363637e-07, | |
| "loss": 0.001, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 2.2284801278210504, | |
| "grad_norm": 0.40728604793548584, | |
| "learning_rate": 7.654545454545455e-07, | |
| "loss": 0.0011, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 2.23247453565009, | |
| "grad_norm": 0.28443050384521484, | |
| "learning_rate": 7.472727272727273e-07, | |
| "loss": 0.0018, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 2.2364689434791294, | |
| "grad_norm": 0.27793049812316895, | |
| "learning_rate": 7.290909090909092e-07, | |
| "loss": 0.0017, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 2.2404633513081684, | |
| "grad_norm": 0.10264703631401062, | |
| "learning_rate": 7.10909090909091e-07, | |
| "loss": 0.0044, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 2.244457759137208, | |
| "grad_norm": 0.12262337654829025, | |
| "learning_rate": 6.927272727272729e-07, | |
| "loss": 0.0011, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 2.2484521669662474, | |
| "grad_norm": 0.17834536731243134, | |
| "learning_rate": 6.745454545454547e-07, | |
| "loss": 0.0012, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 2.2524465747952864, | |
| "grad_norm": 0.25186392664909363, | |
| "learning_rate": 6.563636363636363e-07, | |
| "loss": 0.0032, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 2.256440982624326, | |
| "grad_norm": 0.23524995148181915, | |
| "learning_rate": 6.381818181818183e-07, | |
| "loss": 0.0036, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 2.2604353904533654, | |
| "grad_norm": 0.08354029059410095, | |
| "learning_rate": 6.200000000000001e-07, | |
| "loss": 0.0009, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 2.2644297982824044, | |
| "grad_norm": 0.5130342841148376, | |
| "learning_rate": 6.018181818181819e-07, | |
| "loss": 0.0012, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 2.268424206111444, | |
| "grad_norm": 0.17740464210510254, | |
| "learning_rate": 5.836363636363637e-07, | |
| "loss": 0.0045, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 2.2724186139404834, | |
| "grad_norm": 0.07126139849424362, | |
| "learning_rate": 5.654545454545455e-07, | |
| "loss": 0.0017, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 2.276413021769523, | |
| "grad_norm": 0.07142548263072968, | |
| "learning_rate": 5.472727272727273e-07, | |
| "loss": 0.0032, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 2.280407429598562, | |
| "grad_norm": 0.1244952380657196, | |
| "learning_rate": 5.290909090909092e-07, | |
| "loss": 0.0011, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 2.2844018374276014, | |
| "grad_norm": 0.6580970883369446, | |
| "learning_rate": 5.109090909090909e-07, | |
| "loss": 0.0015, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 2.288396245256641, | |
| "grad_norm": 0.06932102888822556, | |
| "learning_rate": 4.927272727272728e-07, | |
| "loss": 0.0025, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 2.29239065308568, | |
| "grad_norm": 1.0498089790344238, | |
| "learning_rate": 4.745454545454546e-07, | |
| "loss": 0.0037, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 2.2963850609147194, | |
| "grad_norm": 0.13281786441802979, | |
| "learning_rate": 4.563636363636364e-07, | |
| "loss": 0.0035, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 2.300379468743759, | |
| "grad_norm": 0.37985411286354065, | |
| "learning_rate": 4.381818181818182e-07, | |
| "loss": 0.0012, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 2.304373876572798, | |
| "grad_norm": 0.20376817882061005, | |
| "learning_rate": 4.2000000000000006e-07, | |
| "loss": 0.0021, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 2.3083682844018374, | |
| "grad_norm": 0.04566574841737747, | |
| "learning_rate": 4.0181818181818184e-07, | |
| "loss": 0.0018, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 2.312362692230877, | |
| "grad_norm": 0.12633280456066132, | |
| "learning_rate": 3.836363636363637e-07, | |
| "loss": 0.002, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 2.316357100059916, | |
| "grad_norm": 0.08685902506113052, | |
| "learning_rate": 3.654545454545455e-07, | |
| "loss": 0.0012, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 2.3203515078889554, | |
| "grad_norm": 0.18134891986846924, | |
| "learning_rate": 3.4727272727272725e-07, | |
| "loss": 0.0022, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 2.324345915717995, | |
| "grad_norm": 0.06675967574119568, | |
| "learning_rate": 3.290909090909091e-07, | |
| "loss": 0.0012, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 2.3283403235470344, | |
| "grad_norm": 0.14449624717235565, | |
| "learning_rate": 3.109090909090909e-07, | |
| "loss": 0.0022, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.3323347313760734, | |
| "grad_norm": 0.24351002275943756, | |
| "learning_rate": 2.9272727272727276e-07, | |
| "loss": 0.0021, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.336329139205113, | |
| "grad_norm": 0.314222514629364, | |
| "learning_rate": 2.7454545454545455e-07, | |
| "loss": 0.0027, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 2.3403235470341524, | |
| "grad_norm": 0.5329357981681824, | |
| "learning_rate": 2.563636363636364e-07, | |
| "loss": 0.0016, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 2.3443179548631914, | |
| "grad_norm": 0.6777558922767639, | |
| "learning_rate": 2.381818181818182e-07, | |
| "loss": 0.0018, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 2.348312362692231, | |
| "grad_norm": 0.5858981609344482, | |
| "learning_rate": 2.2e-07, | |
| "loss": 0.0015, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 2.3523067705212704, | |
| "grad_norm": 0.29430916905403137, | |
| "learning_rate": 2.0181818181818185e-07, | |
| "loss": 0.0023, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 2.3563011783503094, | |
| "grad_norm": 0.07274141162633896, | |
| "learning_rate": 1.8363636363636363e-07, | |
| "loss": 0.0015, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 2.360295586179349, | |
| "grad_norm": 0.14015686511993408, | |
| "learning_rate": 1.6545454545454545e-07, | |
| "loss": 0.0016, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 2.3642899940083884, | |
| "grad_norm": 0.13865520060062408, | |
| "learning_rate": 1.4727272727272728e-07, | |
| "loss": 0.0024, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 2.3682844018374274, | |
| "grad_norm": 0.3300738036632538, | |
| "learning_rate": 1.290909090909091e-07, | |
| "loss": 0.004, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 2.372278809666467, | |
| "grad_norm": 0.10867658257484436, | |
| "learning_rate": 1.1090909090909091e-07, | |
| "loss": 0.0016, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 2.3762732174955064, | |
| "grad_norm": 0.06421338766813278, | |
| "learning_rate": 9.272727272727273e-08, | |
| "loss": 0.0019, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 2.380267625324546, | |
| "grad_norm": 0.09091343730688095, | |
| "learning_rate": 7.454545454545455e-08, | |
| "loss": 0.0009, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 2.384262033153585, | |
| "grad_norm": 0.06275226920843124, | |
| "learning_rate": 5.636363636363637e-08, | |
| "loss": 0.001, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 2.3882564409826244, | |
| "grad_norm": 0.13375839591026306, | |
| "learning_rate": 3.818181818181819e-08, | |
| "loss": 0.0011, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 2.392250848811664, | |
| "grad_norm": 0.1499566286802292, | |
| "learning_rate": 2e-08, | |
| "loss": 0.0011, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "grad_norm": 0.47475066781044006, | |
| "learning_rate": 1.8181818181818182e-09, | |
| "loss": 0.0017, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "eval_loss": 0.006691531278192997, | |
| "eval_runtime": 7534.0645, | |
| "eval_samples_per_second": 2.658, | |
| "eval_steps_per_second": 0.332, | |
| "eval_wer": 0.4967807301369415, | |
| "step": 6000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 6000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.539223607902208e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
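
The trace above appears to be the standard `trainer_state.json` that the Hugging Face `Trainer` writes next to each checkpoint: a `log_history` list mixing per-10-step training entries (`loss`, `learning_rate`, `grad_norm`) with eval entries every 2000 steps (`eval_loss`, `eval_wer`), followed by run metadata. Below is a minimal sketch of how such a dump can be inspected; the filename, the key-based split, and the use of matplotlib are assumptions for illustration, not part of the log itself.

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state dump (the path is an assumption; adjust to your run).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; eval entries carry "eval_loss"/"eval_wer".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_wer" in e]

# Print the eval trajectory recorded above (WER 0.7817 at step 4000,
# 0.4968 at step 6000).
for e in eval_logs:
    print(f"step {e['step']}: eval_loss={e['eval_loss']:.4f}, WER={e['eval_wer']:.4f}")

# Plot the training loss alongside the learning-rate schedule, which in this
# run decays linearly to ~1.8e-09 by max_steps=6000.
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot([e["step"] for e in train_logs], [e["loss"] for e in train_logs])
ax1.set(xlabel="step", ylabel="training loss")
ax2.plot([e["step"] for e in train_logs], [e["learning_rate"] for e in train_logs])
ax2.set(xlabel="step", ylabel="learning rate")
fig.tight_layout()
plt.show()
```

Run against this file, the eval printout shows the word error rate dropping from 0.7817 at the step-4000 checkpoint to 0.4968 at step 6000, where training halts because `max_steps` (6000) is reached and `should_training_stop` flips to true.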