{
  "best_global_step": 4000,
  "best_metric": 0.7496644515009657,
  "best_model_checkpoint": "./SALAMA_NEW5/checkpoint-4000",
  "epoch": 2.396644697423607,
  "eval_steps": 2000,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005991611743559017,
      "grad_norm": 3.054955244064331,
      "learning_rate": 1.8e-07,
      "loss": 0.0402,
      "step": 10
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 4.194129943847656,
      "learning_rate": 3.8e-07,
      "loss": 0.0309,
      "step": 20
    },
    {
      "epoch": 0.017974835230677052,
      "grad_norm": 3.2008094787597656,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0349,
      "step": 30
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 5.843791484832764,
      "learning_rate": 7.8e-07,
      "loss": 0.0486,
      "step": 40
    },
    {
      "epoch": 0.029958058717795086,
      "grad_norm": 2.8723483085632324,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0387,
      "step": 50
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 2.8198156356811523,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0307,
      "step": 60
    },
    {
      "epoch": 0.041941282204913125,
      "grad_norm": 2.8341281414031982,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0281,
      "step": 70
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 2.1870110034942627,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0247,
      "step": 80
    },
    {
      "epoch": 0.05392450569203116,
      "grad_norm": 3.1290953159332275,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0368,
      "step": 90
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 2.8471310138702393,
      "learning_rate": 1.98e-06,
      "loss": 0.0278,
      "step": 100
    },
    {
      "epoch": 0.0659077291791492,
      "grad_norm": 2.17521595954895,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0379,
      "step": 110
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 2.6352105140686035,
      "learning_rate": 2.38e-06,
      "loss": 0.0373,
      "step": 120
    },
    {
      "epoch": 0.07789095266626722,
      "grad_norm": 2.4513449668884277,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0277,
      "step": 130
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 2.2260937690734863,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0303,
      "step": 140
    },
    {
      "epoch": 0.08987417615338526,
      "grad_norm": 3.3591432571411133,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0294,
      "step": 150
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 4.335723876953125,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0454,
      "step": 160
    },
    {
      "epoch": 0.10185739964050329,
      "grad_norm": 2.558425188064575,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.043,
      "step": 170
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 1.9048619270324707,
      "learning_rate": 3.58e-06,
      "loss": 0.0393,
      "step": 180
    },
    {
      "epoch": 0.11384062312762133,
      "grad_norm": 2.985074043273926,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0455,
      "step": 190
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 3.3705410957336426,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0418,
      "step": 200
    },
    {
      "epoch": 0.12582384661473936,
      "grad_norm": 3.761021852493286,
      "learning_rate": 4.18e-06,
      "loss": 0.029,
      "step": 210
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 3.0466420650482178,
      "learning_rate": 4.38e-06,
      "loss": 0.0411,
      "step": 220
    },
    {
      "epoch": 0.1378070701018574,
      "grad_norm": 2.7269339561462402,
      "learning_rate": 4.58e-06,
      "loss": 0.0388,
      "step": 230
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 2.970675468444824,
      "learning_rate": 4.78e-06,
      "loss": 0.0417,
      "step": 240
    },
    {
      "epoch": 0.14979029358897544,
      "grad_norm": 1.9261115789413452,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0251,
      "step": 250
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 3.860489845275879,
      "learning_rate": 5.18e-06,
      "loss": 0.0311,
      "step": 260
    },
    {
      "epoch": 0.16177351707609347,
      "grad_norm": 3.1818289756774902,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0313,
      "step": 270
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 2.9667961597442627,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0444,
      "step": 280
    },
    {
      "epoch": 0.1737567405632115,
      "grad_norm": 2.8325982093811035,
      "learning_rate": 5.78e-06,
      "loss": 0.0317,
      "step": 290
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 3.026876211166382,
      "learning_rate": 5.98e-06,
      "loss": 0.032,
      "step": 300
    },
    {
      "epoch": 0.18573996405032953,
      "grad_norm": 3.14216685295105,
      "learning_rate": 6.18e-06,
      "loss": 0.0437,
      "step": 310
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 1.519713044166565,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0278,
      "step": 320
    },
    {
      "epoch": 0.19772318753744758,
      "grad_norm": 4.41205358505249,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0398,
      "step": 330
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 2.6780660152435303,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0395,
      "step": 340
    },
    {
      "epoch": 0.2097064110245656,
      "grad_norm": 3.0380663871765137,
      "learning_rate": 6.98e-06,
      "loss": 0.0474,
      "step": 350
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 3.55961012840271,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0329,
      "step": 360
    },
    {
      "epoch": 0.22168963451168364,
      "grad_norm": 4.072763442993164,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0341,
      "step": 370
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 3.622586250305176,
      "learning_rate": 7.58e-06,
      "loss": 0.0319,
      "step": 380
    },
    {
      "epoch": 0.23367285799880166,
      "grad_norm": 2.926701545715332,
      "learning_rate": 7.78e-06,
      "loss": 0.0323,
      "step": 390
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 2.8291685581207275,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0355,
      "step": 400
    },
    {
      "epoch": 0.24565608148591972,
      "grad_norm": 3.0425820350646973,
      "learning_rate": 8.18e-06,
      "loss": 0.0449,
      "step": 410
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 2.2372989654541016,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0316,
      "step": 420
    },
    {
      "epoch": 0.2576393049730378,
      "grad_norm": 4.378376483917236,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0428,
      "step": 430
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 3.7955782413482666,
      "learning_rate": 8.78e-06,
      "loss": 0.0348,
      "step": 440
    },
    {
      "epoch": 0.2696225284601558,
      "grad_norm": 2.99241304397583,
      "learning_rate": 8.98e-06,
      "loss": 0.0344,
      "step": 450
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 3.7702858448028564,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0448,
      "step": 460
    },
    {
      "epoch": 0.28160575194727383,
      "grad_norm": 2.994795799255371,
      "learning_rate": 9.38e-06,
      "loss": 0.0386,
      "step": 470
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 4.249560356140137,
      "learning_rate": 9.58e-06,
      "loss": 0.0329,
      "step": 480
    },
    {
      "epoch": 0.29358897543439183,
      "grad_norm": 3.8317806720733643,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0341,
      "step": 490
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 3.258640766143799,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0391,
      "step": 500
    },
    {
      "epoch": 0.3055721989215099,
      "grad_norm": 3.7174925804138184,
      "learning_rate": 9.985427461139897e-06,
      "loss": 0.0418,
      "step": 510
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 2.631500482559204,
      "learning_rate": 9.969235751295337e-06,
      "loss": 0.0382,
      "step": 520
    },
    {
      "epoch": 0.31755542240862794,
      "grad_norm": 3.008939027786255,
      "learning_rate": 9.953044041450778e-06,
      "loss": 0.0361,
      "step": 530
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 3.0445072650909424,
      "learning_rate": 9.936852331606218e-06,
      "loss": 0.0409,
      "step": 540
    },
    {
      "epoch": 0.32953864589574594,
      "grad_norm": 2.2396678924560547,
      "learning_rate": 9.920660621761659e-06,
      "loss": 0.046,
      "step": 550
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 3.8621556758880615,
      "learning_rate": 9.9044689119171e-06,
      "loss": 0.0398,
      "step": 560
    },
    {
      "epoch": 0.341521869382864,
      "grad_norm": 2.762946844100952,
      "learning_rate": 9.888277202072539e-06,
      "loss": 0.0478,
      "step": 570
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 2.5804591178894043,
      "learning_rate": 9.87208549222798e-06,
      "loss": 0.0393,
      "step": 580
    },
    {
      "epoch": 0.35350509286998205,
      "grad_norm": 2.925901412963867,
      "learning_rate": 9.85589378238342e-06,
      "loss": 0.0413,
      "step": 590
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 3.877678394317627,
      "learning_rate": 9.839702072538862e-06,
      "loss": 0.036,
      "step": 600
    },
    {
      "epoch": 0.36548831635710005,
      "grad_norm": 3.657261371612549,
      "learning_rate": 9.823510362694301e-06,
      "loss": 0.0392,
      "step": 610
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 3.3975796699523926,
      "learning_rate": 9.807318652849742e-06,
      "loss": 0.0354,
      "step": 620
    },
    {
      "epoch": 0.3774715398442181,
      "grad_norm": 2.704132080078125,
      "learning_rate": 9.791126943005183e-06,
      "loss": 0.0357,
      "step": 630
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 3.333548069000244,
      "learning_rate": 9.774935233160622e-06,
      "loss": 0.0416,
      "step": 640
    },
    {
      "epoch": 0.3894547633313361,
      "grad_norm": 2.5030357837677,
      "learning_rate": 9.758743523316063e-06,
      "loss": 0.0396,
      "step": 650
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 3.135446310043335,
      "learning_rate": 9.742551813471504e-06,
      "loss": 0.0335,
      "step": 660
    },
    {
      "epoch": 0.40143798681845416,
      "grad_norm": 3.9234273433685303,
      "learning_rate": 9.726360103626944e-06,
      "loss": 0.0314,
      "step": 670
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 3.275618553161621,
      "learning_rate": 9.710168393782385e-06,
      "loss": 0.048,
      "step": 680
    },
    {
      "epoch": 0.4134212103055722,
      "grad_norm": 4.415038108825684,
      "learning_rate": 9.693976683937824e-06,
      "loss": 0.0476,
      "step": 690
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 4.066105365753174,
      "learning_rate": 9.677784974093265e-06,
      "loss": 0.0388,
      "step": 700
    },
    {
      "epoch": 0.4254044337926902,
      "grad_norm": 3.2467117309570312,
      "learning_rate": 9.661593264248706e-06,
      "loss": 0.0429,
      "step": 710
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 2.8135671615600586,
      "learning_rate": 9.645401554404145e-06,
      "loss": 0.0307,
      "step": 720
    },
    {
      "epoch": 0.4373876572798083,
      "grad_norm": 3.0795235633850098,
      "learning_rate": 9.629209844559586e-06,
      "loss": 0.0322,
      "step": 730
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 3.1245977878570557,
      "learning_rate": 9.613018134715027e-06,
      "loss": 0.0401,
      "step": 740
    },
    {
      "epoch": 0.44937088076692633,
      "grad_norm": 3.60970139503479,
      "learning_rate": 9.596826424870466e-06,
      "loss": 0.0404,
      "step": 750
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 3.176697254180908,
      "learning_rate": 9.580634715025907e-06,
      "loss": 0.0512,
      "step": 760
    },
    {
      "epoch": 0.46135410425404433,
      "grad_norm": 1.4304643869400024,
      "learning_rate": 9.564443005181347e-06,
      "loss": 0.0324,
      "step": 770
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 4.420058250427246,
      "learning_rate": 9.548251295336788e-06,
      "loss": 0.0428,
      "step": 780
    },
    {
      "epoch": 0.4733373277411624,
      "grad_norm": 1.9721171855926514,
      "learning_rate": 9.532059585492229e-06,
      "loss": 0.0372,
      "step": 790
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 2.6821985244750977,
      "learning_rate": 9.51586787564767e-06,
      "loss": 0.0335,
      "step": 800
    },
    {
      "epoch": 0.4853205512282804,
      "grad_norm": 2.514643430709839,
      "learning_rate": 9.49967616580311e-06,
      "loss": 0.0375,
      "step": 810
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 2.563555955886841,
      "learning_rate": 9.48348445595855e-06,
      "loss": 0.04,
      "step": 820
    },
    {
      "epoch": 0.49730377471539844,
      "grad_norm": 4.12261962890625,
      "learning_rate": 9.467292746113991e-06,
      "loss": 0.0475,
      "step": 830
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 3.868694543838501,
      "learning_rate": 9.451101036269432e-06,
      "loss": 0.0412,
      "step": 840
    },
    {
      "epoch": 0.5092869982025164,
      "grad_norm": 2.079617738723755,
      "learning_rate": 9.434909326424871e-06,
      "loss": 0.0406,
      "step": 850
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 9.873922348022461,
      "learning_rate": 9.418717616580312e-06,
      "loss": 0.0369,
      "step": 860
    },
    {
      "epoch": 0.5212702216896345,
      "grad_norm": 2.7303805351257324,
      "learning_rate": 9.402525906735751e-06,
      "loss": 0.0346,
      "step": 870
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 4.011098384857178,
      "learning_rate": 9.386334196891192e-06,
      "loss": 0.0449,
      "step": 880
    },
    {
      "epoch": 0.5332534451767525,
      "grad_norm": 3.526718854904175,
      "learning_rate": 9.370142487046633e-06,
      "loss": 0.0402,
      "step": 890
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 4.020525932312012,
      "learning_rate": 9.353950777202073e-06,
      "loss": 0.0483,
      "step": 900
    },
    {
      "epoch": 0.5452366686638705,
      "grad_norm": 3.401987075805664,
      "learning_rate": 9.337759067357514e-06,
      "loss": 0.0483,
      "step": 910
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 3.5527446269989014,
      "learning_rate": 9.321567357512955e-06,
      "loss": 0.0368,
      "step": 920
    },
    {
      "epoch": 0.5572198921509887,
      "grad_norm": 1.9209288358688354,
      "learning_rate": 9.305375647668394e-06,
      "loss": 0.044,
      "step": 930
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 2.181330442428589,
      "learning_rate": 9.289183937823835e-06,
      "loss": 0.0428,
      "step": 940
    },
    {
      "epoch": 0.5692031156381067,
      "grad_norm": 3.110551595687866,
      "learning_rate": 9.272992227979276e-06,
      "loss": 0.0463,
      "step": 950
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 2.9438483715057373,
      "learning_rate": 9.256800518134715e-06,
      "loss": 0.0429,
      "step": 960
    },
    {
      "epoch": 0.5811863391252247,
      "grad_norm": 2.3534343242645264,
      "learning_rate": 9.240608808290156e-06,
      "loss": 0.0324,
      "step": 970
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 2.0768914222717285,
      "learning_rate": 9.224417098445595e-06,
      "loss": 0.0379,
      "step": 980
    },
    {
      "epoch": 0.5931695626123428,
      "grad_norm": 3.7332491874694824,
      "learning_rate": 9.208225388601038e-06,
      "loss": 0.0345,
      "step": 990
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 2.2312402725219727,
      "learning_rate": 9.192033678756477e-06,
      "loss": 0.0256,
      "step": 1000
    },
    {
      "epoch": 0.6051527860994608,
      "grad_norm": 2.953599691390991,
      "learning_rate": 9.175841968911918e-06,
      "loss": 0.0319,
      "step": 1010
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 2.3059096336364746,
      "learning_rate": 9.15965025906736e-06,
      "loss": 0.0401,
      "step": 1020
    },
    {
      "epoch": 0.6171360095865788,
      "grad_norm": 2.9137516021728516,
      "learning_rate": 9.143458549222799e-06,
      "loss": 0.0332,
      "step": 1030
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 2.6183419227600098,
      "learning_rate": 9.12726683937824e-06,
      "loss": 0.0426,
      "step": 1040
    },
    {
      "epoch": 0.6291192330736968,
      "grad_norm": 6.774188041687012,
      "learning_rate": 9.11107512953368e-06,
      "loss": 0.038,
      "step": 1050
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 2.62477707862854,
      "learning_rate": 9.09488341968912e-06,
      "loss": 0.0334,
      "step": 1060
    },
    {
      "epoch": 0.6411024565608149,
      "grad_norm": 2.885798931121826,
      "learning_rate": 9.07869170984456e-06,
      "loss": 0.0503,
      "step": 1070
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 2.11885666847229,
      "learning_rate": 9.0625e-06,
      "loss": 0.0397,
      "step": 1080
    },
    {
      "epoch": 0.6530856800479329,
      "grad_norm": 3.4099042415618896,
      "learning_rate": 9.046308290155441e-06,
      "loss": 0.0376,
      "step": 1090
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 3.797945737838745,
      "learning_rate": 9.030116580310882e-06,
      "loss": 0.0353,
      "step": 1100
    },
    {
      "epoch": 0.6650689035350509,
      "grad_norm": 2.498478889465332,
      "learning_rate": 9.013924870466321e-06,
      "loss": 0.0348,
      "step": 1110
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 2.4544801712036133,
      "learning_rate": 8.997733160621762e-06,
      "loss": 0.0445,
      "step": 1120
    },
    {
      "epoch": 0.677052127022169,
      "grad_norm": 3.8221726417541504,
      "learning_rate": 8.981541450777203e-06,
      "loss": 0.0329,
      "step": 1130
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 3.10368013381958,
      "learning_rate": 8.965349740932643e-06,
      "loss": 0.0362,
      "step": 1140
    },
    {
      "epoch": 0.689035350509287,
      "grad_norm": 2.9055230617523193,
      "learning_rate": 8.949158031088084e-06,
      "loss": 0.0321,
      "step": 1150
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 2.983210563659668,
      "learning_rate": 8.932966321243523e-06,
      "loss": 0.0349,
      "step": 1160
    },
    {
      "epoch": 0.701018573996405,
      "grad_norm": 3.8483057022094727,
      "learning_rate": 8.916774611398964e-06,
      "loss": 0.0377,
      "step": 1170
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 1.701863408088684,
      "learning_rate": 8.900582901554405e-06,
      "loss": 0.038,
      "step": 1180
    },
    {
      "epoch": 0.7130017974835231,
      "grad_norm": 3.209134101867676,
      "learning_rate": 8.884391191709846e-06,
      "loss": 0.0366,
      "step": 1190
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 2.894364356994629,
      "learning_rate": 8.868199481865287e-06,
      "loss": 0.0411,
      "step": 1200
    },
    {
      "epoch": 0.7249850209706411,
      "grad_norm": 2.638300895690918,
      "learning_rate": 8.852007772020726e-06,
      "loss": 0.0402,
      "step": 1210
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 2.969956398010254,
      "learning_rate": 8.835816062176167e-06,
      "loss": 0.0461,
      "step": 1220
    },
    {
      "epoch": 0.7369682444577591,
      "grad_norm": 3.42706298828125,
      "learning_rate": 8.819624352331608e-06,
      "loss": 0.0387,
      "step": 1230
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 3.150902509689331,
      "learning_rate": 8.803432642487047e-06,
      "loss": 0.0308,
      "step": 1240
    },
    {
      "epoch": 0.7489514679448772,
      "grad_norm": 2.9279613494873047,
      "learning_rate": 8.787240932642488e-06,
      "loss": 0.0356,
      "step": 1250
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 2.067312002182007,
      "learning_rate": 8.771049222797927e-06,
      "loss": 0.0415,
      "step": 1260
    },
    {
      "epoch": 0.7609346914319952,
      "grad_norm": 3.134526252746582,
      "learning_rate": 8.754857512953368e-06,
      "loss": 0.0339,
      "step": 1270
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 2.982830762863159,
      "learning_rate": 8.73866580310881e-06,
      "loss": 0.0312,
      "step": 1280
    },
    {
      "epoch": 0.7729179149191132,
      "grad_norm": 3.388392210006714,
      "learning_rate": 8.722474093264249e-06,
      "loss": 0.0289,
      "step": 1290
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 4.263204574584961,
      "learning_rate": 8.70628238341969e-06,
      "loss": 0.0477,
      "step": 1300
    },
    {
      "epoch": 0.7849011384062313,
      "grad_norm": 4.791659832000732,
      "learning_rate": 8.69009067357513e-06,
      "loss": 0.0357,
      "step": 1310
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 2.5197086334228516,
      "learning_rate": 8.67389896373057e-06,
      "loss": 0.0395,
      "step": 1320
    },
    {
      "epoch": 0.7968843618933493,
      "grad_norm": 3.8546409606933594,
      "learning_rate": 8.657707253886011e-06,
      "loss": 0.0425,
      "step": 1330
    },
    {
      "epoch": 0.8028759736369083,
      "grad_norm": 1.428477168083191,
      "learning_rate": 8.641515544041452e-06,
      "loss": 0.0416,
      "step": 1340
    },
    {
      "epoch": 0.8088675853804673,
      "grad_norm": 2.677443742752075,
      "learning_rate": 8.625323834196891e-06,
      "loss": 0.037,
      "step": 1350
    },
    {
      "epoch": 0.8148591971240263,
      "grad_norm": 2.3491549491882324,
      "learning_rate": 8.609132124352332e-06,
      "loss": 0.031,
      "step": 1360
    },
    {
      "epoch": 0.8208508088675854,
      "grad_norm": 3.307966947555542,
      "learning_rate": 8.592940414507773e-06,
      "loss": 0.0331,
      "step": 1370
    },
    {
      "epoch": 0.8268424206111444,
      "grad_norm": 2.554168701171875,
      "learning_rate": 8.576748704663214e-06,
      "loss": 0.0393,
      "step": 1380
    },
    {
      "epoch": 0.8328340323547034,
      "grad_norm": 4.652346611022949,
      "learning_rate": 8.560556994818653e-06,
      "loss": 0.0507,
      "step": 1390
    },
    {
      "epoch": 0.8388256440982624,
      "grad_norm": 3.3353545665740967,
      "learning_rate": 8.544365284974094e-06,
      "loss": 0.0475,
      "step": 1400
    },
    {
      "epoch": 0.8448172558418214,
      "grad_norm": 2.1270079612731934,
      "learning_rate": 8.528173575129535e-06,
      "loss": 0.0398,
      "step": 1410
    },
    {
      "epoch": 0.8508088675853804,
      "grad_norm": 3.5821142196655273,
      "learning_rate": 8.511981865284975e-06,
      "loss": 0.0391,
      "step": 1420
    },
    {
      "epoch": 0.8568004793289394,
      "grad_norm": 3.8081862926483154,
      "learning_rate": 8.495790155440416e-06,
      "loss": 0.0353,
      "step": 1430
    },
    {
      "epoch": 0.8627920910724985,
      "grad_norm": 2.368657112121582,
      "learning_rate": 8.479598445595855e-06,
      "loss": 0.0331,
      "step": 1440
    },
    {
      "epoch": 0.8687837028160575,
      "grad_norm": 3.060089349746704,
      "learning_rate": 8.463406735751296e-06,
      "loss": 0.0452,
      "step": 1450
    },
    {
      "epoch": 0.8747753145596165,
      "grad_norm": 3.8501534461975098,
      "learning_rate": 8.447215025906737e-06,
      "loss": 0.0381,
      "step": 1460
    },
    {
      "epoch": 0.8807669263031755,
      "grad_norm": 3.5876686573028564,
      "learning_rate": 8.431023316062176e-06,
      "loss": 0.0398,
      "step": 1470
    },
    {
      "epoch": 0.8867585380467345,
      "grad_norm": 3.394860029220581,
      "learning_rate": 8.414831606217617e-06,
      "loss": 0.0304,
      "step": 1480
    },
    {
      "epoch": 0.8927501497902935,
      "grad_norm": 2.1367321014404297,
      "learning_rate": 8.398639896373058e-06,
      "loss": 0.0393,
      "step": 1490
    },
    {
      "epoch": 0.8987417615338527,
      "grad_norm": 3.4387011528015137,
      "learning_rate": 8.382448186528497e-06,
      "loss": 0.0438,
      "step": 1500
    },
    {
      "epoch": 0.9047333732774117,
      "grad_norm": 2.3441576957702637,
      "learning_rate": 8.366256476683938e-06,
      "loss": 0.0322,
      "step": 1510
    },
    {
      "epoch": 0.9107249850209707,
      "grad_norm": 2.45141339302063,
      "learning_rate": 8.35006476683938e-06,
      "loss": 0.0391,
      "step": 1520
    },
    {
      "epoch": 0.9167165967645297,
      "grad_norm": 3.975356340408325,
      "learning_rate": 8.333873056994819e-06,
      "loss": 0.0361,
      "step": 1530
    },
    {
      "epoch": 0.9227082085080887,
      "grad_norm": 3.1409218311309814,
      "learning_rate": 8.31768134715026e-06,
      "loss": 0.0292,
      "step": 1540
    },
    {
      "epoch": 0.9286998202516477,
      "grad_norm": 2.6796035766601562,
      "learning_rate": 8.301489637305699e-06,
      "loss": 0.0386,
      "step": 1550
    },
    {
      "epoch": 0.9346914319952067,
      "grad_norm": 3.725886583328247,
      "learning_rate": 8.28529792746114e-06,
      "loss": 0.0381,
      "step": 1560
    },
    {
      "epoch": 0.9406830437387658,
      "grad_norm": 3.8962814807891846,
      "learning_rate": 8.269106217616581e-06,
      "loss": 0.0338,
      "step": 1570
    },
    {
      "epoch": 0.9466746554823248,
      "grad_norm": 2.792511463165283,
      "learning_rate": 8.252914507772022e-06,
      "loss": 0.0358,
      "step": 1580
    },
    {
      "epoch": 0.9526662672258838,
      "grad_norm": 1.1965686082839966,
      "learning_rate": 8.236722797927463e-06,
      "loss": 0.032,
      "step": 1590
    },
    {
      "epoch": 0.9586578789694428,
      "grad_norm": 3.1600377559661865,
      "learning_rate": 8.220531088082902e-06,
      "loss": 0.0395,
      "step": 1600
    },
    {
      "epoch": 0.9646494907130018,
      "grad_norm": 3.8041903972625732,
      "learning_rate": 8.204339378238343e-06,
      "loss": 0.0376,
      "step": 1610
    },
    {
      "epoch": 0.9706411024565608,
      "grad_norm": 2.3930938243865967,
      "learning_rate": 8.188147668393784e-06,
      "loss": 0.0306,
      "step": 1620
    },
    {
      "epoch": 0.9766327142001199,
      "grad_norm": 1.6600314378738403,
      "learning_rate": 8.171955958549223e-06,
      "loss": 0.0337,
      "step": 1630
    },
    {
      "epoch": 0.9826243259436789,
      "grad_norm": 2.756298065185547,
      "learning_rate": 8.155764248704664e-06,
      "loss": 0.0386,
      "step": 1640
    },
    {
      "epoch": 0.9886159376872379,
      "grad_norm": 3.6421010494232178,
      "learning_rate": 8.139572538860104e-06,
      "loss": 0.0444,
      "step": 1650
    },
    {
      "epoch": 0.9946075494307969,
      "grad_norm": 3.0130767822265625,
      "learning_rate": 8.123380829015545e-06,
      "loss": 0.0469,
      "step": 1660
    },
    {
      "epoch": 1.0005991611743559,
      "grad_norm": 0.8826521039009094,
      "learning_rate": 8.107189119170986e-06,
      "loss": 0.0438,
      "step": 1670
    },
    {
      "epoch": 1.0065907729179149,
      "grad_norm": 1.1715954542160034,
      "learning_rate": 8.090997409326425e-06,
      "loss": 0.0091,
      "step": 1680
    },
    {
      "epoch": 1.0125823846614739,
      "grad_norm": 1.0954883098602295,
      "learning_rate": 8.074805699481866e-06,
      "loss": 0.0131,
      "step": 1690
    },
    {
      "epoch": 1.0185739964050329,
      "grad_norm": 1.6219624280929565,
      "learning_rate": 8.058613989637307e-06,
      "loss": 0.0114,
      "step": 1700
    },
    {
      "epoch": 1.0245656081485919,
      "grad_norm": 1.850096583366394,
      "learning_rate": 8.042422279792746e-06,
      "loss": 0.014,
      "step": 1710
    },
    {
      "epoch": 1.030557219892151,
      "grad_norm": 1.4714280366897583,
      "learning_rate": 8.026230569948187e-06,
      "loss": 0.014,
      "step": 1720
    },
    {
      "epoch": 1.03654883163571,
      "grad_norm": 0.9599210619926453,
      "learning_rate": 8.010038860103628e-06,
      "loss": 0.0123,
      "step": 1730
    },
    {
      "epoch": 1.042540443379269,
      "grad_norm": 2.0420618057250977,
      "learning_rate": 7.993847150259067e-06,
      "loss": 0.0129,
      "step": 1740
    },
    {
      "epoch": 1.048532055122828,
      "grad_norm": 2.185182571411133,
      "learning_rate": 7.977655440414508e-06,
      "loss": 0.0142,
      "step": 1750
    },
    {
      "epoch": 1.054523666866387,
      "grad_norm": 1.5642657279968262,
      "learning_rate": 7.96146373056995e-06,
      "loss": 0.0146,
      "step": 1760
    },
    {
      "epoch": 1.060515278609946,
      "grad_norm": 1.761810302734375,
      "learning_rate": 7.94527202072539e-06,
      "loss": 0.013,
      "step": 1770
    },
    {
      "epoch": 1.066506890353505,
      "grad_norm": 1.4610791206359863,
      "learning_rate": 7.92908031088083e-06,
      "loss": 0.0137,
      "step": 1780
    },
    {
      "epoch": 1.072498502097064,
      "grad_norm": 2.5526697635650635,
      "learning_rate": 7.91288860103627e-06,
      "loss": 0.0113,
      "step": 1790
    },
    {
      "epoch": 1.078490113840623,
      "grad_norm": 1.9310262203216553,
      "learning_rate": 7.896696891191711e-06,
      "loss": 0.0115,
      "step": 1800
    },
    {
      "epoch": 1.084481725584182,
      "grad_norm": 1.3230663537979126,
      "learning_rate": 7.88050518134715e-06,
      "loss": 0.0125,
      "step": 1810
    },
    {
      "epoch": 1.090473337327741,
      "grad_norm": 1.7172852754592896,
      "learning_rate": 7.864313471502592e-06,
      "loss": 0.0109,
      "step": 1820
    },
    {
      "epoch": 1.0964649490713,
      "grad_norm": 0.9016011953353882,
      "learning_rate": 7.848121761658031e-06,
      "loss": 0.0111,
      "step": 1830
    },
    {
      "epoch": 1.102456560814859,
      "grad_norm": 2.4582221508026123,
      "learning_rate": 7.831930051813472e-06,
      "loss": 0.0134,
      "step": 1840
    },
    {
      "epoch": 1.1084481725584183,
      "grad_norm": 1.2407488822937012,
      "learning_rate": 7.815738341968913e-06,
      "loss": 0.0157,
      "step": 1850
    },
    {
      "epoch": 1.1144397843019773,
      "grad_norm": 2.757380485534668,
      "learning_rate": 7.799546632124352e-06,
      "loss": 0.0113,
      "step": 1860
    },
    {
      "epoch": 1.1204313960455363,
      "grad_norm": 1.3119287490844727,
      "learning_rate": 7.783354922279793e-06,
      "loss": 0.0141,
      "step": 1870
    },
    {
      "epoch": 1.1264230077890953,
      "grad_norm": 1.2015535831451416,
      "learning_rate": 7.767163212435234e-06,
      "loss": 0.0097,
      "step": 1880
    },
    {
      "epoch": 1.1324146195326543,
      "grad_norm": 0.945795476436615,
      "learning_rate": 7.750971502590674e-06,
      "loss": 0.01,
      "step": 1890
    },
    {
      "epoch": 1.1384062312762133,
      "grad_norm": 2.025670289993286,
      "learning_rate": 7.734779792746114e-06,
      "loss": 0.0162,
      "step": 1900
    },
    {
      "epoch": 1.1443978430197723,
      "grad_norm": 2.3837294578552246,
      "learning_rate": 7.718588082901555e-06,
      "loss": 0.0131,
      "step": 1910
    },
    {
      "epoch": 1.1503894547633313,
      "grad_norm": 1.0956385135650635,
      "learning_rate": 7.702396373056995e-06,
      "loss": 0.0116,
      "step": 1920
    },
    {
      "epoch": 1.1563810665068903,
      "grad_norm": 1.218746304512024,
      "learning_rate": 7.686204663212436e-06,
      "loss": 0.0123,
      "step": 1930
    },
    {
      "epoch": 1.1623726782504493,
      "grad_norm": 1.3381291627883911,
      "learning_rate": 7.670012953367875e-06,
      "loss": 0.0131,
      "step": 1940
    },
    {
      "epoch": 1.1683642899940083,
      "grad_norm": 2.1365227699279785,
      "learning_rate": 7.653821243523318e-06,
      "loss": 0.0153,
      "step": 1950
    },
    {
      "epoch": 1.1743559017375673,
      "grad_norm": 2.483567953109741,
      "learning_rate": 7.637629533678757e-06,
      "loss": 0.0141,
      "step": 1960
    },
    {
      "epoch": 1.1803475134811263,
      "grad_norm": 1.4842569828033447,
      "learning_rate": 7.621437823834198e-06,
      "loss": 0.0129,
      "step": 1970
    },
    {
      "epoch": 1.1863391252246855,
      "grad_norm": 0.8382937908172607,
      "learning_rate": 7.605246113989638e-06,
      "loss": 0.0138,
      "step": 1980
    },
    {
      "epoch": 1.1923307369682445,
      "grad_norm": 1.9443378448486328,
      "learning_rate": 7.589054404145079e-06,
      "loss": 0.0113,
      "step": 1990
    },
    {
      "epoch": 1.1983223487118035,
      "grad_norm": 1.5882847309112549,
      "learning_rate": 7.572862694300519e-06,
      "loss": 0.0149,
      "step": 2000
    },
    {
      "epoch": 1.1983223487118035,
      "eval_loss": 0.01669400744140148,
      "eval_runtime": 5969.257,
      "eval_samples_per_second": 2.237,
      "eval_steps_per_second": 0.28,
      "eval_wer": 1.6916554817166987,
      "step": 2000
    },
    {
      "epoch": 1.2043139604553625,
      "grad_norm": 1.8422805070877075,
      "learning_rate": 7.556670984455959e-06,
      "loss": 0.0111,
      "step": 2010
    },
    {
      "epoch": 1.2103055721989215,
      "grad_norm": 1.9448800086975098,
      "learning_rate": 7.5404792746113994e-06,
      "loss": 0.0119,
      "step": 2020
    },
    {
      "epoch": 1.2162971839424805,
      "grad_norm": 2.3415045738220215,
      "learning_rate": 7.52428756476684e-06,
      "loss": 0.0132,
      "step": 2030
    },
    {
      "epoch": 1.2222887956860395,
      "grad_norm": 1.5432671308517456,
      "learning_rate": 7.5080958549222805e-06,
      "loss": 0.0109,
      "step": 2040
    },
    {
      "epoch": 1.2282804074295985,
      "grad_norm": 2.6311228275299072,
      "learning_rate": 7.491904145077721e-06,
      "loss": 0.0122,
      "step": 2050
    },
    {
      "epoch": 1.2342720191731575,
      "grad_norm": 1.8125990629196167,
      "learning_rate": 7.475712435233161e-06,
      "loss": 0.016,
      "step": 2060
    },
    {
      "epoch": 1.2402636309167165,
      "grad_norm": 2.044970750808716,
      "learning_rate": 7.459520725388602e-06,
      "loss": 0.0107,
      "step": 2070
    },
    {
      "epoch": 1.2462552426602755,
      "grad_norm": 1.6010984182357788,
      "learning_rate": 7.443329015544042e-06,
      "loss": 0.0157,
      "step": 2080
    },
    {
      "epoch": 1.2522468544038348,
      "grad_norm": 1.2587394714355469,
      "learning_rate": 7.427137305699482e-06,
      "loss": 0.0117,
      "step": 2090
    },
    {
      "epoch": 1.2582384661473935,
      "grad_norm": 2.0188496112823486,
      "learning_rate": 7.410945595854922e-06,
      "loss": 0.0138,
      "step": 2100
    },
    {
      "epoch": 1.2642300778909528,
      "grad_norm": 1.0700892210006714,
      "learning_rate": 7.394753886010363e-06,
      "loss": 0.0106,
      "step": 2110
    },
    {
      "epoch": 1.2702216896345118,
      "grad_norm": 1.6175626516342163,
      "learning_rate": 7.378562176165803e-06,
      "loss": 0.0091,
      "step": 2120
    },
    {
      "epoch": 1.2762133013780708,
      "grad_norm": 2.6413605213165283,
      "learning_rate": 7.362370466321243e-06,
      "loss": 0.0166,
      "step": 2130
    },
    {
      "epoch": 1.2822049131216298,
      "grad_norm": 2.32147479057312,
      "learning_rate": 7.346178756476684e-06,
      "loss": 0.0157,
      "step": 2140
    },
    {
      "epoch": 1.2881965248651888,
      "grad_norm": 0.9610141515731812,
      "learning_rate": 7.329987046632125e-06,
      "loss": 0.0134,
      "step": 2150
    },
    {
      "epoch": 1.2941881366087478,
      "grad_norm": 1.8824456930160522,
      "learning_rate": 7.3137953367875655e-06,
      "loss": 0.0112,
      "step": 2160
    },
    {
      "epoch": 1.3001797483523068,
      "grad_norm": 2.3188655376434326,
      "learning_rate": 7.2976036269430065e-06,
      "loss": 0.0132,
      "step": 2170
    },
    {
      "epoch": 1.3061713600958658,
      "grad_norm": 0.7640858888626099,
      "learning_rate": 7.281411917098447e-06,
      "loss": 0.0142,
      "step": 2180
    },
    {
      "epoch": 1.3121629718394248,
      "grad_norm": 1.854457974433899,
      "learning_rate": 7.265220207253887e-06,
      "loss": 0.016,
      "step": 2190
    },
    {
      "epoch": 1.3181545835829838,
      "grad_norm": 2.088655710220337,
      "learning_rate": 7.249028497409327e-06,
      "loss": 0.0192,
      "step": 2200
    },
    {
      "epoch": 1.3241461953265428,
      "grad_norm": 1.0051050186157227,
      "learning_rate": 7.232836787564768e-06,
      "loss": 0.0133,
      "step": 2210
    },
    {
      "epoch": 1.330137807070102,
      "grad_norm": 2.056321620941162,
      "learning_rate": 7.216645077720208e-06,
      "loss": 0.0106,
      "step": 2220
    },
    {
      "epoch": 1.3361294188136608,
      "grad_norm": 1.9347177743911743,
      "learning_rate": 7.200453367875648e-06,
      "loss": 0.0155,
      "step": 2230
    },
    {
      "epoch": 1.34212103055722,
      "grad_norm": 3.1228256225585938,
      "learning_rate": 7.184261658031088e-06,
      "loss": 0.0155,
      "step": 2240
    },
    {
      "epoch": 1.348112642300779,
      "grad_norm": 1.391132116317749,
      "learning_rate": 7.168069948186529e-06,
      "loss": 0.0118,
      "step": 2250
    },
    {
      "epoch": 1.354104254044338,
      "grad_norm": 1.646230697631836,
      "learning_rate": 7.151878238341969e-06,
      "loss": 0.012,
      "step": 2260
    },
    {
      "epoch": 1.360095865787897,
      "grad_norm": 1.2384326457977295,
      "learning_rate": 7.1356865284974095e-06,
      "loss": 0.0096,
      "step": 2270
    },
    {
      "epoch": 1.366087477531456,
      "grad_norm": 2.0356945991516113,
      "learning_rate": 7.1194948186528505e-06,
      "loss": 0.0114,
      "step": 2280
    },
    {
      "epoch": 1.372079089275015,
      "grad_norm": 2.6498777866363525,
      "learning_rate": 7.103303108808291e-06,
      "loss": 0.0174,
      "step": 2290
    },
    {
      "epoch": 1.378070701018574,
      "grad_norm": 1.1051616668701172,
      "learning_rate": 7.087111398963731e-06,
      "loss": 0.0144,
      "step": 2300
    },
    {
      "epoch": 1.384062312762133,
      "grad_norm": 1.8187443017959595,
      "learning_rate": 7.070919689119171e-06,
      "loss": 0.0125,
      "step": 2310
    },
    {
      "epoch": 1.390053924505692,
      "grad_norm": 1.7438606023788452,
      "learning_rate": 7.054727979274612e-06,
      "loss": 0.0124,
      "step": 2320
    },
    {
      "epoch": 1.396045536249251,
      "grad_norm": 1.630191445350647,
      "learning_rate": 7.038536269430052e-06,
      "loss": 0.0148,
      "step": 2330
    },
    {
      "epoch": 1.40203714799281,
      "grad_norm": 1.9865493774414062,
      "learning_rate": 7.022344559585493e-06,
      "loss": 0.0119,
      "step": 2340
    },
    {
      "epoch": 1.4080287597363692,
      "grad_norm": 1.144235372543335,
      "learning_rate": 7.006152849740934e-06,
      "loss": 0.0124,
      "step": 2350
    },
    {
      "epoch": 1.414020371479928,
      "grad_norm": 1.5504426956176758,
      "learning_rate": 6.989961139896374e-06,
      "loss": 0.0151,
      "step": 2360
    },
    {
      "epoch": 1.4200119832234872,
      "grad_norm": 2.2012171745300293,
      "learning_rate": 6.973769430051814e-06,
      "loss": 0.0124,
      "step": 2370
    },
    {
      "epoch": 1.4260035949670462,
      "grad_norm": 0.8054595589637756,
      "learning_rate": 6.957577720207255e-06,
      "loss": 0.0118,
      "step": 2380
    },
    {
      "epoch": 1.4319952067106052,
      "grad_norm": 1.585860013961792,
      "learning_rate": 6.941386010362695e-06,
      "loss": 0.0188,
      "step": 2390
    },
    {
      "epoch": 1.4379868184541642,
      "grad_norm": 2.1649467945098877,
      "learning_rate": 6.925194300518135e-06,
      "loss": 0.0151,
      "step": 2400
    },
    {
      "epoch": 1.4439784301977232,
      "grad_norm": 1.1747404336929321,
      "learning_rate": 6.9090025906735755e-06,
      "loss": 0.011,
      "step": 2410
    },
    {
      "epoch": 1.4499700419412822,
      "grad_norm": 1.132906436920166,
      "learning_rate": 6.8928108808290165e-06,
      "loss": 0.018,
      "step": 2420
    },
    {
      "epoch": 1.4559616536848412,
      "grad_norm": 1.7166882753372192,
      "learning_rate": 6.876619170984457e-06,
      "loss": 0.016,
      "step": 2430
    },
    {
      "epoch": 1.4619532654284002,
      "grad_norm": 1.3083289861679077,
      "learning_rate": 6.860427461139897e-06,
      "loss": 0.0121,
      "step": 2440
    },
    {
      "epoch": 1.4679448771719592,
      "grad_norm": 1.786952018737793,
      "learning_rate": 6.844235751295337e-06,
      "loss": 0.0133,
      "step": 2450
    },
    {
      "epoch": 1.4739364889155182,
      "grad_norm": 1.2713590860366821,
      "learning_rate": 6.828044041450778e-06,
      "loss": 0.0111,
      "step": 2460
    },
    {
      "epoch": 1.4799281006590772,
      "grad_norm": 3.510352611541748,
      "learning_rate": 6.811852331606218e-06,
      "loss": 0.0168,
      "step": 2470
    },
    {
      "epoch": 1.4859197124026364,
      "grad_norm": 1.5672187805175781,
      "learning_rate": 6.795660621761658e-06,
      "loss": 0.0128,
      "step": 2480
    },
    {
      "epoch": 1.4919113241461952,
      "grad_norm": 1.66056227684021,
      "learning_rate": 6.779468911917098e-06,
      "loss": 0.011,
      "step": 2490
    },
    {
      "epoch": 1.4979029358897544,
      "grad_norm": 1.6276098489761353,
      "learning_rate": 6.763277202072539e-06,
      "loss": 0.0143,
      "step": 2500
    },
    {
      "epoch": 1.5038945476333132,
      "grad_norm": 1.007177472114563,
      "learning_rate": 6.747085492227979e-06,
      "loss": 0.0204,
      "step": 2510
    },
    {
      "epoch": 1.5098861593768724,
      "grad_norm": 1.4152703285217285,
      "learning_rate": 6.7308937823834195e-06,
      "loss": 0.0127,
      "step": 2520
    },
    {
      "epoch": 1.5158777711204314,
      "grad_norm": 1.85543954372406,
      "learning_rate": 6.714702072538861e-06,
      "loss": 0.0097,
      "step": 2530
    },
    {
      "epoch": 1.5218693828639904,
      "grad_norm": 3.0540146827697754,
      "learning_rate": 6.6985103626943015e-06,
      "loss": 0.0113,
      "step": 2540
    },
    {
      "epoch": 1.5278609946075494,
      "grad_norm": 1.1710422039031982,
      "learning_rate": 6.682318652849742e-06,
      "loss": 0.0097,
      "step": 2550
    },
    {
      "epoch": 1.5338526063511084,
      "grad_norm": 1.7694730758666992,
      "learning_rate": 6.666126943005183e-06,
      "loss": 0.0089,
      "step": 2560
    },
    {
      "epoch": 1.5398442180946674,
      "grad_norm": 1.0694987773895264,
      "learning_rate": 6.649935233160623e-06,
      "loss": 0.0153,
      "step": 2570
    },
    {
      "epoch": 1.5458358298382264,
      "grad_norm": 2.7110159397125244,
      "learning_rate": 6.633743523316063e-06,
      "loss": 0.0116,
      "step": 2580
    },
    {
      "epoch": 1.5518274415817856,
      "grad_norm": 0.3812025785446167,
      "learning_rate": 6.617551813471503e-06,
      "loss": 0.0112,
      "step": 2590
    },
    {
      "epoch": 1.5578190533253444,
      "grad_norm": 1.339559555053711,
      "learning_rate": 6.601360103626944e-06,
      "loss": 0.011,
      "step": 2600
    },
    {
      "epoch": 1.5638106650689036,
      "grad_norm": 0.9652156233787537,
      "learning_rate": 6.585168393782384e-06,
      "loss": 0.0079,
      "step": 2610
    },
    {
      "epoch": 1.5698022768124624,
      "grad_norm": 1.0095865726470947,
      "learning_rate": 6.568976683937824e-06,
      "loss": 0.0095,
      "step": 2620
    },
    {
      "epoch": 1.5757938885560216,
      "grad_norm": 2.067873239517212,
      "learning_rate": 6.552784974093264e-06,
      "loss": 0.0115,
      "step": 2630
    },
    {
      "epoch": 1.5817855002995804,
      "grad_norm": 2.7700791358947754,
      "learning_rate": 6.536593264248705e-06,
      "loss": 0.0144,
      "step": 2640
    },
    {
      "epoch": 1.5877771120431396,
      "grad_norm": 1.3618015050888062,
      "learning_rate": 6.5204015544041455e-06,
      "loss": 0.0152,
      "step": 2650
    },
    {
      "epoch": 1.5937687237866986,
      "grad_norm": 1.793584942817688,
      "learning_rate": 6.504209844559586e-06,
      "loss": 0.0156,
      "step": 2660
    },
    {
      "epoch": 1.5997603355302576,
      "grad_norm": 1.2789638042449951,
      "learning_rate": 6.4880181347150266e-06,
      "loss": 0.0142,
      "step": 2670
    },
    {
      "epoch": 1.6057519472738166,
      "grad_norm": 2.1396851539611816,
      "learning_rate": 6.471826424870467e-06,
      "loss": 0.0099,
      "step": 2680
    },
    {
      "epoch": 1.6117435590173756,
      "grad_norm": 2.3870065212249756,
      "learning_rate": 6.455634715025907e-06,
      "loss": 0.013,
      "step": 2690
    },
    {
      "epoch": 1.6177351707609346,
      "grad_norm": 1.0365428924560547,
      "learning_rate": 6.439443005181347e-06,
      "loss": 0.0111,
      "step": 2700
    },
    {
      "epoch": 1.6237267825044936,
      "grad_norm": 1.7155725955963135,
      "learning_rate": 6.423251295336788e-06,
      "loss": 0.0104,
      "step": 2710
    },
    {
      "epoch": 1.6297183942480529,
      "grad_norm": 1.4784544706344604,
      "learning_rate": 6.407059585492228e-06,
      "loss": 0.009,
      "step": 2720
    },
    {
      "epoch": 1.6357100059916116,
      "grad_norm": 2.044337749481201,
      "learning_rate": 6.390867875647669e-06,
      "loss": 0.0109,
      "step": 2730
    },
    {
      "epoch": 1.6417016177351709,
      "grad_norm": 2.3519575595855713,
      "learning_rate": 6.37467616580311e-06,
      "loss": 0.0148,
      "step": 2740
    },
    {
      "epoch": 1.6476932294787296,
      "grad_norm": 1.4037830829620361,
      "learning_rate": 6.35848445595855e-06,
      "loss": 0.0107,
      "step": 2750
    },
    {
      "epoch": 1.6536848412222889,
      "grad_norm": 2.146458864212036,
      "learning_rate": 6.34229274611399e-06,
      "loss": 0.0103,
      "step": 2760
    },
    {
      "epoch": 1.6596764529658476,
      "grad_norm": 2.8700923919677734,
      "learning_rate": 6.326101036269431e-06,
      "loss": 0.0167,
      "step": 2770
    },
    {
      "epoch": 1.6656680647094069,
      "grad_norm": 1.5204532146453857,
      "learning_rate": 6.309909326424871e-06,
      "loss": 0.0105,
      "step": 2780
    },
    {
      "epoch": 1.6716596764529659,
      "grad_norm": 1.0328407287597656,
      "learning_rate": 6.2937176165803115e-06,
      "loss": 0.0095,
      "step": 2790
    },
    {
      "epoch": 1.6776512881965249,
      "grad_norm": 2.680178165435791,
      "learning_rate": 6.277525906735752e-06,
      "loss": 0.0143,
      "step": 2800
    },
    {
      "epoch": 1.6836428999400839,
      "grad_norm": 1.7891147136688232,
      "learning_rate": 6.261334196891193e-06,
      "loss": 0.0114,
      "step": 2810
    },
    {
      "epoch": 1.6896345116836429,
      "grad_norm": 2.1444687843322754,
      "learning_rate": 6.245142487046633e-06,
      "loss": 0.0134,
      "step": 2820
    },
    {
      "epoch": 1.6956261234272019,
      "grad_norm": 1.5276315212249756,
      "learning_rate": 6.228950777202073e-06,
      "loss": 0.011,
      "step": 2830
    },
    {
      "epoch": 1.7016177351707609,
      "grad_norm": 2.835799217224121,
      "learning_rate": 6.212759067357513e-06,
      "loss": 0.012,
      "step": 2840
    },
    {
      "epoch": 1.70760934691432,
      "grad_norm": 1.2132076025009155,
      "learning_rate": 6.196567357512954e-06,
      "loss": 0.0157,
      "step": 2850
    },
    {
      "epoch": 1.7136009586578789,
      "grad_norm": 1.4709638357162476,
      "learning_rate": 6.180375647668394e-06,
      "loss": 0.0129,
      "step": 2860
    },
    {
      "epoch": 1.719592570401438,
      "grad_norm": 0.6052160859107971,
      "learning_rate": 6.164183937823834e-06,
      "loss": 0.0074,
      "step": 2870
    },
    {
      "epoch": 1.7255841821449969,
      "grad_norm": 2.8164920806884766,
      "learning_rate": 6.147992227979274e-06,
      "loss": 0.0156,
      "step": 2880
    },
    {
      "epoch": 1.731575793888556,
      "grad_norm": 1.6463820934295654,
      "learning_rate": 6.131800518134715e-06,
      "loss": 0.0135,
      "step": 2890
    },
    {
      "epoch": 1.737567405632115,
      "grad_norm": 1.992889165878296,
      "learning_rate": 6.1156088082901555e-06,
      "loss": 0.0112,
      "step": 2900
    },
    {
      "epoch": 1.743559017375674,
      "grad_norm": 2.074042797088623,
      "learning_rate": 6.099417098445596e-06,
      "loss": 0.0132,
      "step": 2910
    },
    {
      "epoch": 1.749550629119233,
      "grad_norm": 1.7670295238494873,
      "learning_rate": 6.0832253886010375e-06,
      "loss": 0.0092,
      "step": 2920
    },
    {
      "epoch": 1.755542240862792,
      "grad_norm": 1.3253710269927979,
      "learning_rate": 6.067033678756478e-06,
      "loss": 0.0105,
      "step": 2930
    },
    {
      "epoch": 1.761533852606351,
      "grad_norm": 2.247828722000122,
      "learning_rate": 6.050841968911918e-06,
      "loss": 0.011,
      "step": 2940
    },
    {
      "epoch": 1.76752546434991,
      "grad_norm": 0.9244122505187988,
      "learning_rate": 6.034650259067359e-06,
      "loss": 0.0104,
      "step": 2950
    },
    {
      "epoch": 1.773517076093469,
      "grad_norm": 1.7153069972991943,
      "learning_rate": 6.018458549222799e-06,
      "loss": 0.0159,
      "step": 2960
    },
    {
      "epoch": 1.779508687837028,
      "grad_norm": 2.84084415435791,
      "learning_rate": 6.002266839378239e-06,
      "loss": 0.0145,
      "step": 2970
    },
    {
      "epoch": 1.7855002995805873,
      "grad_norm": 1.1830694675445557,
      "learning_rate": 5.986075129533679e-06,
      "loss": 0.0142,
      "step": 2980
    },
    {
      "epoch": 1.791491911324146,
      "grad_norm": 2.046485424041748,
      "learning_rate": 5.96988341968912e-06,
      "loss": 0.0108,
      "step": 2990
    },
    {
      "epoch": 1.7974835230677053,
      "grad_norm": 2.352125883102417,
      "learning_rate": 5.95369170984456e-06,
      "loss": 0.0129,
      "step": 3000
    },
    {
      "epoch": 1.803475134811264,
      "grad_norm": 2.4842944145202637,
      "learning_rate": 5.9375e-06,
      "loss": 0.0116,
      "step": 3010
    },
    {
      "epoch": 1.8094667465548233,
      "grad_norm": 1.6072431802749634,
      "learning_rate": 5.9213082901554405e-06,
      "loss": 0.0121,
      "step": 3020
    },
    {
      "epoch": 1.8154583582983823,
      "grad_norm": 0.8712224364280701,
      "learning_rate": 5.9051165803108814e-06,
      "loss": 0.0104,
      "step": 3030
    },
    {
      "epoch": 1.8214499700419413,
      "grad_norm": 0.8175852298736572,
      "learning_rate": 5.8889248704663216e-06,
      "loss": 0.0094,
      "step": 3040
    },
    {
      "epoch": 1.8274415817855003,
      "grad_norm": 2.5840089321136475,
      "learning_rate": 5.872733160621762e-06,
      "loss": 0.0133,
      "step": 3050
    },
    {
      "epoch": 1.8334331935290593,
      "grad_norm": 1.5247653722763062,
      "learning_rate": 5.856541450777203e-06,
      "loss": 0.0186,
      "step": 3060
    },
    {
      "epoch": 1.8394248052726183,
      "grad_norm": 1.4512144327163696,
      "learning_rate": 5.840349740932643e-06,
      "loss": 0.0135,
      "step": 3070
    },
    {
      "epoch": 1.8454164170161773,
      "grad_norm": 1.6446938514709473,
      "learning_rate": 5.824158031088083e-06,
      "loss": 0.0098,
      "step": 3080
    },
    {
      "epoch": 1.8514080287597365,
      "grad_norm": 1.4512169361114502,
      "learning_rate": 5.807966321243523e-06,
      "loss": 0.0109,
      "step": 3090
    },
    {
      "epoch": 1.8573996405032953,
      "grad_norm": 1.1414815187454224,
      "learning_rate": 5.791774611398964e-06,
      "loss": 0.0093,
      "step": 3100
    },
    {
      "epoch": 1.8633912522468545,
      "grad_norm": 2.5550334453582764,
      "learning_rate": 5.775582901554405e-06,
      "loss": 0.0159,
      "step": 3110
    },
    {
      "epoch": 1.8693828639904133,
      "grad_norm": 1.7715239524841309,
      "learning_rate": 5.759391191709845e-06,
      "loss": 0.0087,
      "step": 3120
    },
    {
      "epoch": 1.8753744757339725,
      "grad_norm": 1.2993277311325073,
      "learning_rate": 5.743199481865286e-06,
      "loss": 0.02,
      "step": 3130
    },
    {
      "epoch": 1.8813660874775313,
      "grad_norm": 1.139583945274353,
      "learning_rate": 5.727007772020726e-06,
      "loss": 0.0097,
      "step": 3140
    },
    {
      "epoch": 1.8873576992210905,
      "grad_norm": 1.481346845626831,
      "learning_rate": 5.710816062176166e-06,
      "loss": 0.0107,
      "step": 3150
    },
    {
      "epoch": 1.8933493109646495,
      "grad_norm": 0.9106934070587158,
      "learning_rate": 5.6946243523316065e-06,
      "loss": 0.0114,
      "step": 3160
    },
    {
      "epoch": 1.8993409227082085,
      "grad_norm": 0.6988396048545837,
      "learning_rate": 5.6784326424870475e-06,
      "loss": 0.0127,
      "step": 3170
    },
    {
      "epoch": 1.9053325344517675,
      "grad_norm": 0.6591480374336243,
      "learning_rate": 5.662240932642488e-06,
      "loss": 0.0113,
      "step": 3180
    },
    {
      "epoch": 1.9113241461953265,
      "grad_norm": 2.382932662963867,
      "learning_rate": 5.646049222797928e-06,
      "loss": 0.0125,
      "step": 3190
    },
    {
      "epoch": 1.9173157579388855,
      "grad_norm": 0.7922401428222656,
      "learning_rate": 5.629857512953369e-06,
      "loss": 0.0093,
      "step": 3200
    },
    {
      "epoch": 1.9233073696824445,
      "grad_norm": 1.2889260053634644,
      "learning_rate": 5.613665803108809e-06,
      "loss": 0.0124,
      "step": 3210
    },
    {
      "epoch": 1.9292989814260038,
      "grad_norm": 2.0242807865142822,
      "learning_rate": 5.597474093264249e-06,
      "loss": 0.0096,
      "step": 3220
    },
    {
      "epoch": 1.9352905931695625,
      "grad_norm": 1.228401780128479,
      "learning_rate": 5.581282383419689e-06,
      "loss": 0.0105,
      "step": 3230
    },
    {
      "epoch": 1.9412822049131218,
      "grad_norm": 0.5464358329772949,
      "learning_rate": 5.56509067357513e-06,
      "loss": 0.0119,
      "step": 3240
    },
    {
      "epoch": 1.9472738166566805,
      "grad_norm": 1.2989234924316406,
      "learning_rate": 5.54889896373057e-06,
      "loss": 0.0102,
      "step": 3250
    },
    {
      "epoch": 1.9532654284002398,
      "grad_norm": 1.615453839302063,
      "learning_rate": 5.53270725388601e-06,
      "loss": 0.0125,
      "step": 3260
    },
    {
      "epoch": 1.9592570401437985,
      "grad_norm": 1.5663808584213257,
      "learning_rate": 5.5165155440414505e-06,
      "loss": 0.009,
      "step": 3270
    },
    {
      "epoch": 1.9652486518873578,
      "grad_norm": 1.0305567979812622,
      "learning_rate": 5.5003238341968915e-06,
      "loss": 0.0079,
      "step": 3280
    },
    {
      "epoch": 1.9712402636309168,
      "grad_norm": 1.7442350387573242,
      "learning_rate": 5.484132124352332e-06,
      "loss": 0.0166,
      "step": 3290
    },
    {
      "epoch": 1.9772318753744758,
      "grad_norm": 1.3776471614837646,
      "learning_rate": 5.4679404145077734e-06,
      "loss": 0.0101,
      "step": 3300
    },
    {
      "epoch": 1.9832234871180348,
      "grad_norm": 1.412724494934082,
      "learning_rate": 5.4517487046632136e-06,
      "loss": 0.012,
      "step": 3310
    },
    {
      "epoch": 1.9892150988615938,
      "grad_norm": 1.9947086572647095,
      "learning_rate": 5.435556994818654e-06,
      "loss": 0.0156,
      "step": 3320
    },
    {
      "epoch": 1.9952067106051528,
      "grad_norm": 1.3730186223983765,
      "learning_rate": 5.419365284974094e-06,
      "loss": 0.0083,
      "step": 3330
    },
    {
      "epoch": 2.0011983223487118,
      "grad_norm": 0.7743808627128601,
      "learning_rate": 5.403173575129535e-06,
      "loss": 0.0067,
      "step": 3340
    },
    {
      "epoch": 2.007189934092271,
      "grad_norm": 1.0866020917892456,
      "learning_rate": 5.386981865284975e-06,
      "loss": 0.0062,
      "step": 3350
    },
    {
      "epoch": 2.0131815458358298,
      "grad_norm": 2.021395206451416,
      "learning_rate": 5.370790155440415e-06,
      "loss": 0.0056,
      "step": 3360
    },
    {
      "epoch": 2.019173157579389,
      "grad_norm": 0.7016503810882568,
      "learning_rate": 5.354598445595855e-06,
      "loss": 0.0031,
      "step": 3370
    },
    {
      "epoch": 2.0251647693229478,
      "grad_norm": 0.2258431762456894,
      "learning_rate": 5.338406735751296e-06,
      "loss": 0.0048,
      "step": 3380
    },
    {
      "epoch": 2.031156381066507,
      "grad_norm": 0.49931764602661133,
      "learning_rate": 5.322215025906736e-06,
      "loss": 0.0055,
      "step": 3390
    },
    {
      "epoch": 2.0371479928100658,
      "grad_norm": 0.5910160541534424,
      "learning_rate": 5.3060233160621764e-06,
| "loss": 0.0043, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 2.043139604553625, | |
| "grad_norm": 0.3594443202018738, | |
| "learning_rate": 5.2898316062176166e-06, | |
| "loss": 0.0061, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 2.0491312162971838, | |
| "grad_norm": 0.20252452790737152, | |
| "learning_rate": 5.2736398963730575e-06, | |
| "loss": 0.0041, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 2.055122828040743, | |
| "grad_norm": 1.0205600261688232, | |
| "learning_rate": 5.257448186528498e-06, | |
| "loss": 0.0033, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 2.061114439784302, | |
| "grad_norm": 1.057180404663086, | |
| "learning_rate": 5.241256476683938e-06, | |
| "loss": 0.0039, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 2.067106051527861, | |
| "grad_norm": 0.2864468991756439, | |
| "learning_rate": 5.225064766839378e-06, | |
| "loss": 0.003, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 2.07309766327142, | |
| "grad_norm": 0.2602325975894928, | |
| "learning_rate": 5.208873056994819e-06, | |
| "loss": 0.0024, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 2.079089275014979, | |
| "grad_norm": 0.2414165735244751, | |
| "learning_rate": 5.192681347150259e-06, | |
| "loss": 0.0039, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 2.085080886758538, | |
| "grad_norm": 0.2938922047615051, | |
| "learning_rate": 5.176489637305699e-06, | |
| "loss": 0.0027, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 2.091072498502097, | |
| "grad_norm": 1.4712554216384888, | |
| "learning_rate": 5.16029792746114e-06, | |
| "loss": 0.0037, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 2.097064110245656, | |
| "grad_norm": 0.20206642150878906, | |
| "learning_rate": 5.144106217616581e-06, | |
| "loss": 0.0035, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 2.103055721989215, | |
| "grad_norm": 0.68550044298172, | |
| "learning_rate": 5.127914507772021e-06, | |
| "loss": 0.0023, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 2.109047333732774, | |
| "grad_norm": 0.903885006904602, | |
| "learning_rate": 5.111722797927462e-06, | |
| "loss": 0.0056, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 2.115038945476333, | |
| "grad_norm": 0.5431828498840332, | |
| "learning_rate": 5.095531088082902e-06, | |
| "loss": 0.0038, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 2.121030557219892, | |
| "grad_norm": 1.535992980003357, | |
| "learning_rate": 5.0793393782383425e-06, | |
| "loss": 0.0032, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 2.127022168963451, | |
| "grad_norm": 3.78729248046875, | |
| "learning_rate": 5.063147668393783e-06, | |
| "loss": 0.0048, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 2.13301378070701, | |
| "grad_norm": 0.5239720940589905, | |
| "learning_rate": 5.046955958549224e-06, | |
| "loss": 0.0043, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 2.139005392450569, | |
| "grad_norm": 1.523889422416687, | |
| "learning_rate": 5.030764248704664e-06, | |
| "loss": 0.0035, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 2.144997004194128, | |
| "grad_norm": 1.358669400215149, | |
| "learning_rate": 5.014572538860104e-06, | |
| "loss": 0.0046, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.1509886159376874, | |
| "grad_norm": 0.19291242957115173, | |
| "learning_rate": 4.998380829015545e-06, | |
| "loss": 0.0033, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.156980227681246, | |
| "grad_norm": 0.3006669878959656, | |
| "learning_rate": 4.982189119170985e-06, | |
| "loss": 0.0022, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.1629718394248054, | |
| "grad_norm": 0.3663530647754669, | |
| "learning_rate": 4.965997409326425e-06, | |
| "loss": 0.0027, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.168963451168364, | |
| "grad_norm": 0.7184218764305115, | |
| "learning_rate": 4.949805699481865e-06, | |
| "loss": 0.005, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.1749550629119234, | |
| "grad_norm": 0.3978062570095062, | |
| "learning_rate": 4.933613989637306e-06, | |
| "loss": 0.0031, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.180946674655482, | |
| "grad_norm": 1.0698492527008057, | |
| "learning_rate": 4.917422279792747e-06, | |
| "loss": 0.0048, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.1869382863990414, | |
| "grad_norm": 0.36532407999038696, | |
| "learning_rate": 4.901230569948187e-06, | |
| "loss": 0.0028, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.1929298981426, | |
| "grad_norm": 0.2442449927330017, | |
| "learning_rate": 4.8850388601036275e-06, | |
| "loss": 0.0038, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.1989215098861594, | |
| "grad_norm": 0.20055042207241058, | |
| "learning_rate": 4.868847150259068e-06, | |
| "loss": 0.0032, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.204913121629718, | |
| "grad_norm": 0.3060378432273865, | |
| "learning_rate": 4.8526554404145086e-06, | |
| "loss": 0.004, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.2109047333732774, | |
| "grad_norm": 1.6103514432907104, | |
| "learning_rate": 4.836463730569949e-06, | |
| "loss": 0.003, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.2168963451168366, | |
| "grad_norm": 0.7116732597351074, | |
| "learning_rate": 4.820272020725389e-06, | |
| "loss": 0.0033, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.2228879568603954, | |
| "grad_norm": 0.41945987939834595, | |
| "learning_rate": 4.804080310880829e-06, | |
| "loss": 0.0041, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.2288795686039546, | |
| "grad_norm": 0.524526059627533, | |
| "learning_rate": 4.78788860103627e-06, | |
| "loss": 0.0041, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.2348711803475134, | |
| "grad_norm": 0.48254117369651794, | |
| "learning_rate": 4.77169689119171e-06, | |
| "loss": 0.0025, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.2408627920910726, | |
| "grad_norm": 1.119223713874817, | |
| "learning_rate": 4.755505181347151e-06, | |
| "loss": 0.0058, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.2468544038346314, | |
| "grad_norm": 0.8555364608764648, | |
| "learning_rate": 4.739313471502591e-06, | |
| "loss": 0.0022, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.2528460155781906, | |
| "grad_norm": 0.37995922565460205, | |
| "learning_rate": 4.723121761658031e-06, | |
| "loss": 0.0057, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.2588376273217494, | |
| "grad_norm": 1.5643342733383179, | |
| "learning_rate": 4.706930051813472e-06, | |
| "loss": 0.0033, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.2648292390653086, | |
| "grad_norm": 0.14987821877002716, | |
| "learning_rate": 4.690738341968912e-06, | |
| "loss": 0.0037, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.2708208508088674, | |
| "grad_norm": 0.29938772320747375, | |
| "learning_rate": 4.6745466321243525e-06, | |
| "loss": 0.0041, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.2768124625524266, | |
| "grad_norm": 0.5936130285263062, | |
| "learning_rate": 4.658354922279793e-06, | |
| "loss": 0.0035, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.282804074295986, | |
| "grad_norm": 0.6183104515075684, | |
| "learning_rate": 4.642163212435234e-06, | |
| "loss": 0.0029, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.2887956860395446, | |
| "grad_norm": 0.5594862103462219, | |
| "learning_rate": 4.625971502590674e-06, | |
| "loss": 0.0059, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.2947872977831034, | |
| "grad_norm": 0.5086492300033569, | |
| "learning_rate": 4.609779792746114e-06, | |
| "loss": 0.0044, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.3007789095266626, | |
| "grad_norm": 0.41681790351867676, | |
| "learning_rate": 4.593588082901555e-06, | |
| "loss": 0.0026, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.306770521270222, | |
| "grad_norm": 0.7889578938484192, | |
| "learning_rate": 4.577396373056995e-06, | |
| "loss": 0.0026, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.3127621330137806, | |
| "grad_norm": 0.6799167394638062, | |
| "learning_rate": 4.561204663212436e-06, | |
| "loss": 0.0036, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.31875374475734, | |
| "grad_norm": 0.647998034954071, | |
| "learning_rate": 4.545012953367876e-06, | |
| "loss": 0.0023, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.3247453565008986, | |
| "grad_norm": 0.652267336845398, | |
| "learning_rate": 4.528821243523316e-06, | |
| "loss": 0.0026, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.330736968244458, | |
| "grad_norm": 1.081817626953125, | |
| "learning_rate": 4.512629533678756e-06, | |
| "loss": 0.0038, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.3367285799880166, | |
| "grad_norm": 0.13204053044319153, | |
| "learning_rate": 4.496437823834197e-06, | |
| "loss": 0.0035, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.342720191731576, | |
| "grad_norm": 0.4813682436943054, | |
| "learning_rate": 4.4802461139896375e-06, | |
| "loss": 0.0045, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.3487118034751346, | |
| "grad_norm": 0.22035902738571167, | |
| "learning_rate": 4.464054404145078e-06, | |
| "loss": 0.002, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.354703415218694, | |
| "grad_norm": 0.3482280671596527, | |
| "learning_rate": 4.447862694300519e-06, | |
| "loss": 0.0022, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.3606950269622526, | |
| "grad_norm": 1.8050403594970703, | |
| "learning_rate": 4.431670984455959e-06, | |
| "loss": 0.0048, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.366686638705812, | |
| "grad_norm": 0.5814374685287476, | |
| "learning_rate": 4.4154792746114e-06, | |
| "loss": 0.0032, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.372678250449371, | |
| "grad_norm": 0.35395652055740356, | |
| "learning_rate": 4.39928756476684e-06, | |
| "loss": 0.0036, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.37866986219293, | |
| "grad_norm": 0.5275447368621826, | |
| "learning_rate": 4.38309585492228e-06, | |
| "loss": 0.0031, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.384661473936489, | |
| "grad_norm": 0.9818341135978699, | |
| "learning_rate": 4.366904145077721e-06, | |
| "loss": 0.0024, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.390653085680048, | |
| "grad_norm": 0.41264620423316956, | |
| "learning_rate": 4.350712435233161e-06, | |
| "loss": 0.0038, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.396644697423607, | |
| "grad_norm": 0.20038236677646637, | |
| "learning_rate": 4.334520725388601e-06, | |
| "loss": 0.0022, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.396644697423607, | |
| "eval_loss": 0.00884172786027193, | |
| "eval_runtime": 4849.1287, | |
| "eval_samples_per_second": 2.753, | |
| "eval_steps_per_second": 0.344, | |
| "eval_wer": 0.7496644515009657, | |
| "step": 4000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 6676, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 4, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.693431387455488e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |