{
  "best_global_step": 6000,
  "best_metric": 1.6195586580333698,
  "best_model_checkpoint": "./SALAMA_NEW4/checkpoint-6000",
  "epoch": 3.5949670461354106,
  "eval_steps": 2000,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.005991611743559017, "grad_norm": 6.641392230987549, "learning_rate": 1.8e-07, "loss": 0.1132, "step": 10},
    {"epoch": 0.011983223487118035, "grad_norm": 6.161131858825684, "learning_rate": 3.8e-07, "loss": 0.149, "step": 20},
    {"epoch": 0.017974835230677052, "grad_norm": 5.680447578430176, "learning_rate": 5.800000000000001e-07, "loss": 0.0882, "step": 30},
    {"epoch": 0.02396644697423607, "grad_norm": 3.6827754974365234, "learning_rate": 7.8e-07, "loss": 0.0995, "step": 40},
    {"epoch": 0.029958058717795086, "grad_norm": 5.75164794921875, "learning_rate": 9.800000000000001e-07, "loss": 0.1368, "step": 50},
    {"epoch": 0.035949670461354104, "grad_norm": 5.719022274017334, "learning_rate": 1.1800000000000001e-06, "loss": 0.0938, "step": 60},
    {"epoch": 0.041941282204913125, "grad_norm": 4.832603931427002, "learning_rate": 1.3800000000000001e-06, "loss": 0.1163, "step": 70},
    {"epoch": 0.04793289394847214, "grad_norm": 5.619574546813965, "learning_rate": 1.5800000000000001e-06, "loss": 0.1008, "step": 80},
    {"epoch": 0.05392450569203116, "grad_norm": 4.749862194061279, "learning_rate": 1.7800000000000001e-06, "loss": 0.1126, "step": 90},
    {"epoch": 0.05991611743559017, "grad_norm": 6.061086177825928, "learning_rate": 1.98e-06, "loss": 0.1134, "step": 100},
    {"epoch": 0.0659077291791492, "grad_norm": 5.554771423339844, "learning_rate": 2.1800000000000003e-06, "loss": 0.1178, "step": 110},
    {"epoch": 0.07189934092270821, "grad_norm": 4.674679756164551, "learning_rate": 2.38e-06, "loss": 0.0758, "step": 120},
    {"epoch": 0.07789095266626722, "grad_norm": 4.852492332458496, "learning_rate": 2.5800000000000003e-06, "loss": 0.0836, "step": 130},
    {"epoch": 0.08388256440982625, "grad_norm": 5.85217809677124, "learning_rate": 2.7800000000000005e-06, "loss": 0.1097, "step": 140},
    {"epoch": 0.08987417615338526, "grad_norm": 6.0412421226501465, "learning_rate": 2.9800000000000003e-06, "loss": 0.0921, "step": 150},
    {"epoch": 0.09586578789694428, "grad_norm": 5.854034423828125, "learning_rate": 3.1800000000000005e-06, "loss": 0.1144, "step": 160},
    {"epoch": 0.10185739964050329, "grad_norm": 4.274904251098633, "learning_rate": 3.3800000000000007e-06, "loss": 0.1028, "step": 170},
    {"epoch": 0.10784901138406232, "grad_norm": 4.002969741821289, "learning_rate": 3.58e-06, "loss": 0.0759, "step": 180},
    {"epoch": 0.11384062312762133, "grad_norm": 4.920216083526611, "learning_rate": 3.7800000000000002e-06, "loss": 0.1074, "step": 190},
    {"epoch": 0.11983223487118035, "grad_norm": 5.039630889892578, "learning_rate": 3.980000000000001e-06, "loss": 0.1081, "step": 200},
    {"epoch": 0.12582384661473936, "grad_norm": 5.63913631439209, "learning_rate": 4.18e-06, "loss": 0.0974, "step": 210},
    {"epoch": 0.1318154583582984, "grad_norm": 4.254915714263916, "learning_rate": 4.38e-06, "loss": 0.1017, "step": 220},
    {"epoch": 0.1378070701018574, "grad_norm": 5.292032241821289, "learning_rate": 4.58e-06, "loss": 0.0982, "step": 230},
    {"epoch": 0.14379868184541642, "grad_norm": 3.856678009033203, "learning_rate": 4.78e-06, "loss": 0.1015, "step": 240},
    {"epoch": 0.14979029358897544, "grad_norm": 4.549379825592041, "learning_rate": 4.980000000000001e-06, "loss": 0.1245, "step": 250},
    {"epoch": 0.15578190533253444, "grad_norm": 4.482922077178955, "learning_rate": 5.18e-06, "loss": 0.104, "step": 260},
    {"epoch": 0.16177351707609347, "grad_norm": 3.305623769760132, "learning_rate": 5.380000000000001e-06, "loss": 0.0918, "step": 270},
    {"epoch": 0.1677651288196525, "grad_norm": 3.838818073272705, "learning_rate": 5.580000000000001e-06, "loss": 0.085, "step": 280},
    {"epoch": 0.1737567405632115, "grad_norm": 4.304590702056885, "learning_rate": 5.78e-06, "loss": 0.1025, "step": 290},
    {"epoch": 0.17974835230677053, "grad_norm": 5.107855796813965, "learning_rate": 5.98e-06, "loss": 0.1035, "step": 300},
    {"epoch": 0.18573996405032953, "grad_norm": 4.67608118057251, "learning_rate": 6.18e-06, "loss": 0.1292, "step": 310},
    {"epoch": 0.19173157579388855, "grad_norm": 3.5006017684936523, "learning_rate": 6.380000000000001e-06, "loss": 0.1232, "step": 320},
    {"epoch": 0.19772318753744758, "grad_norm": 4.693627834320068, "learning_rate": 6.5800000000000005e-06, "loss": 0.117, "step": 330},
    {"epoch": 0.20371479928100658, "grad_norm": 3.6866164207458496, "learning_rate": 6.780000000000001e-06, "loss": 0.1016, "step": 340},
    {"epoch": 0.2097064110245656, "grad_norm": 4.933455944061279, "learning_rate": 6.98e-06, "loss": 0.108, "step": 350},
    {"epoch": 0.21569802276812464, "grad_norm": 2.64819073677063, "learning_rate": 7.180000000000001e-06, "loss": 0.1101, "step": 360},
    {"epoch": 0.22168963451168364, "grad_norm": 5.819120407104492, "learning_rate": 7.3800000000000005e-06, "loss": 0.1305, "step": 370},
    {"epoch": 0.22768124625524266, "grad_norm": 4.504668235778809, "learning_rate": 7.58e-06, "loss": 0.0993, "step": 380},
    {"epoch": 0.23367285799880166, "grad_norm": 3.761913776397705, "learning_rate": 7.78e-06, "loss": 0.1033, "step": 390},
    {"epoch": 0.2396644697423607, "grad_norm": 3.8250932693481445, "learning_rate": 7.980000000000002e-06, "loss": 0.0917, "step": 400},
    {"epoch": 0.24565608148591972, "grad_norm": 4.284841537475586, "learning_rate": 8.18e-06, "loss": 0.1012, "step": 410},
    {"epoch": 0.2516476932294787, "grad_norm": 4.682767391204834, "learning_rate": 8.380000000000001e-06, "loss": 0.0861, "step": 420},
    {"epoch": 0.2576393049730378, "grad_norm": 3.7932474613189697, "learning_rate": 8.580000000000001e-06, "loss": 0.1151, "step": 430},
    {"epoch": 0.2636309167165968, "grad_norm": 3.588685989379883, "learning_rate": 8.78e-06, "loss": 0.1062, "step": 440},
    {"epoch": 0.2696225284601558, "grad_norm": 3.6409924030303955, "learning_rate": 8.98e-06, "loss": 0.0987, "step": 450},
    {"epoch": 0.2756141402037148, "grad_norm": 3.630150318145752, "learning_rate": 9.180000000000002e-06, "loss": 0.0952, "step": 460},
    {"epoch": 0.28160575194727383, "grad_norm": 3.858980417251587, "learning_rate": 9.38e-06, "loss": 0.0992, "step": 470},
    {"epoch": 0.28759736369083283, "grad_norm": 4.108921527862549, "learning_rate": 9.58e-06, "loss": 0.1153, "step": 480},
    {"epoch": 0.29358897543439183, "grad_norm": 3.802626848220825, "learning_rate": 9.780000000000001e-06, "loss": 0.1149, "step": 490},
    {"epoch": 0.2995805871779509, "grad_norm": 4.295905113220215, "learning_rate": 9.980000000000001e-06, "loss": 0.1118, "step": 500},
    {"epoch": 0.3055721989215099, "grad_norm": 4.09583854675293, "learning_rate": 9.985427461139897e-06, "loss": 0.0955, "step": 510},
    {"epoch": 0.3115638106650689, "grad_norm": 3.3187222480773926, "learning_rate": 9.969235751295337e-06, "loss": 0.1075, "step": 520},
    {"epoch": 0.31755542240862794, "grad_norm": 8.15339183807373, "learning_rate": 9.953044041450778e-06, "loss": 0.1113, "step": 530},
    {"epoch": 0.32354703415218694, "grad_norm": 5.273709297180176, "learning_rate": 9.936852331606218e-06, "loss": 0.1015, "step": 540},
    {"epoch": 0.32953864589574594, "grad_norm": 3.836277484893799, "learning_rate": 9.920660621761659e-06, "loss": 0.0927, "step": 550},
    {"epoch": 0.335530257639305, "grad_norm": 5.473774433135986, "learning_rate": 9.9044689119171e-06, "loss": 0.1045, "step": 560},
    {"epoch": 0.341521869382864, "grad_norm": 5.008555889129639, "learning_rate": 9.888277202072539e-06, "loss": 0.1193, "step": 570},
    {"epoch": 0.347513481126423, "grad_norm": 8.994523048400879, "learning_rate": 9.87208549222798e-06, "loss": 0.1154, "step": 580},
    {"epoch": 0.35350509286998205, "grad_norm": 4.773490905761719, "learning_rate": 9.85589378238342e-06, "loss": 0.1222, "step": 590},
    {"epoch": 0.35949670461354105, "grad_norm": 3.776129722595215, "learning_rate": 9.839702072538862e-06, "loss": 0.0999, "step": 600},
    {"epoch": 0.36548831635710005, "grad_norm": 3.550184726715088, "learning_rate": 9.823510362694301e-06, "loss": 0.1012, "step": 610},
    {"epoch": 0.37147992810065905, "grad_norm": 4.164742469787598, "learning_rate": 9.807318652849742e-06, "loss": 0.0978, "step": 620},
    {"epoch": 0.3774715398442181, "grad_norm": 3.2359657287597656, "learning_rate": 9.791126943005183e-06, "loss": 0.0781, "step": 630},
    {"epoch": 0.3834631515877771, "grad_norm": 3.4513018131256104, "learning_rate": 9.774935233160622e-06, "loss": 0.1098, "step": 640},
    {"epoch": 0.3894547633313361, "grad_norm": 3.489149808883667, "learning_rate": 9.758743523316063e-06, "loss": 0.1316, "step": 650},
    {"epoch": 0.39544637507489516, "grad_norm": 4.0016374588012695, "learning_rate": 9.742551813471504e-06, "loss": 0.092, "step": 660},
    {"epoch": 0.40143798681845416, "grad_norm": 7.272388935089111, "learning_rate": 9.726360103626944e-06, "loss": 0.1273, "step": 670},
    {"epoch": 0.40742959856201316, "grad_norm": 3.443683385848999, "learning_rate": 9.710168393782385e-06, "loss": 0.1053, "step": 680},
    {"epoch": 0.4134212103055722, "grad_norm": 4.35030460357666, "learning_rate": 9.693976683937824e-06, "loss": 0.114, "step": 690},
    {"epoch": 0.4194128220491312, "grad_norm": 3.6015312671661377, "learning_rate": 9.677784974093265e-06, "loss": 0.1018, "step": 700},
    {"epoch": 0.4254044337926902, "grad_norm": 4.1978840827941895, "learning_rate": 9.661593264248706e-06, "loss": 0.1151, "step": 710},
    {"epoch": 0.4313960455362493, "grad_norm": 4.4482879638671875, "learning_rate": 9.645401554404145e-06, "loss": 0.1114, "step": 720},
    {"epoch": 0.4373876572798083, "grad_norm": 4.560624599456787, "learning_rate": 9.629209844559586e-06, "loss": 0.1207, "step": 730},
    {"epoch": 0.4433792690233673, "grad_norm": 5.984576225280762, "learning_rate": 9.613018134715027e-06, "loss": 0.1262, "step": 740},
    {"epoch": 0.44937088076692633, "grad_norm": 4.926331043243408, "learning_rate": 9.596826424870466e-06, "loss": 0.1041, "step": 750},
    {"epoch": 0.45536249251048533, "grad_norm": 4.679685115814209, "learning_rate": 9.580634715025907e-06, "loss": 0.1196, "step": 760},
    {"epoch": 0.46135410425404433, "grad_norm": 4.890758991241455, "learning_rate": 9.564443005181347e-06, "loss": 0.1145, "step": 770},
    {"epoch": 0.46734571599760333, "grad_norm": 4.479861259460449, "learning_rate": 9.548251295336788e-06, "loss": 0.1223, "step": 780},
    {"epoch": 0.4733373277411624, "grad_norm": 4.720247745513916, "learning_rate": 9.532059585492229e-06, "loss": 0.1056, "step": 790},
    {"epoch": 0.4793289394847214, "grad_norm": 3.5392069816589355, "learning_rate": 9.51586787564767e-06, "loss": 0.0992, "step": 800},
    {"epoch": 0.4853205512282804, "grad_norm": 3.7479641437530518, "learning_rate": 9.49967616580311e-06, "loss": 0.1007, "step": 810},
    {"epoch": 0.49131216297183944, "grad_norm": 4.393255710601807, "learning_rate": 9.48348445595855e-06, "loss": 0.1394, "step": 820},
    {"epoch": 0.49730377471539844, "grad_norm": 3.2709712982177734, "learning_rate": 9.467292746113991e-06, "loss": 0.0992, "step": 830},
    {"epoch": 0.5032953864589574, "grad_norm": 4.891329288482666, "learning_rate": 9.451101036269432e-06, "loss": 0.1123, "step": 840},
    {"epoch": 0.5092869982025164, "grad_norm": 4.337294101715088, "learning_rate": 9.434909326424871e-06, "loss": 0.0924, "step": 850},
    {"epoch": 0.5152786099460755, "grad_norm": 3.9342563152313232, "learning_rate": 9.418717616580312e-06, "loss": 0.1099, "step": 860},
    {"epoch": 0.5212702216896345, "grad_norm": 3.220473527908325, "learning_rate": 9.402525906735751e-06, "loss": 0.0935, "step": 870},
    {"epoch": 0.5272618334331935, "grad_norm": 4.463641166687012, "learning_rate": 9.386334196891192e-06, "loss": 0.1101, "step": 880},
    {"epoch": 0.5332534451767525, "grad_norm": 4.686647415161133, "learning_rate": 9.370142487046633e-06, "loss": 0.1161, "step": 890},
    {"epoch": 0.5392450569203115, "grad_norm": 3.4189178943634033, "learning_rate": 9.353950777202073e-06, "loss": 0.1144, "step": 900},
    {"epoch": 0.5452366686638705, "grad_norm": 3.407076597213745, "learning_rate": 9.337759067357514e-06, "loss": 0.0926, "step": 910},
    {"epoch": 0.5512282804074295, "grad_norm": 5.300261974334717, "learning_rate": 9.321567357512955e-06, "loss": 0.1047, "step": 920},
    {"epoch": 0.5572198921509887, "grad_norm": 2.8152244091033936, "learning_rate": 9.305375647668394e-06, "loss": 0.1082, "step": 930},
    {"epoch": 0.5632115038945477, "grad_norm": 4.786292552947998, "learning_rate": 9.289183937823835e-06, "loss": 0.1206, "step": 940},
    {"epoch": 0.5692031156381067, "grad_norm": 3.642893075942993, "learning_rate": 9.272992227979276e-06, "loss": 0.0863, "step": 950},
    {"epoch": 0.5751947273816657, "grad_norm": 4.487881660461426, "learning_rate": 9.256800518134715e-06, "loss": 0.1171, "step": 960},
    {"epoch": 0.5811863391252247, "grad_norm": 4.288365364074707, "learning_rate": 9.240608808290156e-06, "loss": 0.1095, "step": 970},
    {"epoch": 0.5871779508687837, "grad_norm": 3.9558470249176025, "learning_rate": 9.224417098445595e-06, "loss": 0.1185, "step": 980},
    {"epoch": 0.5931695626123428, "grad_norm": 2.573941707611084, "learning_rate": 9.208225388601038e-06, "loss": 0.1157, "step": 990},
    {"epoch": 0.5991611743559018, "grad_norm": 3.7058804035186768, "learning_rate": 9.192033678756477e-06, "loss": 0.1173, "step": 1000},
    {"epoch": 0.6051527860994608, "grad_norm": 4.578528881072998, "learning_rate": 9.175841968911918e-06, "loss": 0.1049, "step": 1010},
    {"epoch": 0.6111443978430198, "grad_norm": 4.096612930297852, "learning_rate": 9.15965025906736e-06, "loss": 0.102, "step": 1020},
    {"epoch": 0.6171360095865788, "grad_norm": 2.6690211296081543, "learning_rate": 9.143458549222799e-06, "loss": 0.1219, "step": 1030},
    {"epoch": 0.6231276213301378, "grad_norm": 4.082022190093994, "learning_rate": 9.12726683937824e-06, "loss": 0.0958, "step": 1040},
    {"epoch": 0.6291192330736968, "grad_norm": 5.364941596984863, "learning_rate": 9.11107512953368e-06, "loss": 0.1152, "step": 1050},
    {"epoch": 0.6351108448172559, "grad_norm": 3.664750099182129, "learning_rate": 9.09488341968912e-06, "loss": 0.107, "step": 1060},
    {"epoch": 0.6411024565608149, "grad_norm": 4.337752342224121, "learning_rate": 9.07869170984456e-06, "loss": 0.1053, "step": 1070},
    {"epoch": 0.6470940683043739, "grad_norm": 4.239952564239502, "learning_rate": 9.0625e-06, "loss": 0.0965, "step": 1080},
    {"epoch": 0.6530856800479329, "grad_norm": 3.8299310207366943, "learning_rate": 9.046308290155441e-06, "loss": 0.0995, "step": 1090},
    {"epoch": 0.6590772917914919, "grad_norm": 3.6321651935577393, "learning_rate": 9.030116580310882e-06, "loss": 0.0991, "step": 1100},
    {"epoch": 0.6650689035350509, "grad_norm": 3.9516804218292236, "learning_rate": 9.013924870466321e-06, "loss": 0.1068, "step": 1110},
    {"epoch": 0.67106051527861, "grad_norm": 4.31940221786499, "learning_rate": 8.997733160621762e-06, "loss": 0.1065, "step": 1120},
    {"epoch": 0.677052127022169, "grad_norm": 4.485226154327393, "learning_rate": 8.981541450777203e-06, "loss": 0.1318, "step": 1130},
    {"epoch": 0.683043738765728, "grad_norm": 2.977614402770996, "learning_rate": 8.965349740932643e-06, "loss": 0.1111, "step": 1140},
    {"epoch": 0.689035350509287, "grad_norm": 4.644229888916016, "learning_rate": 8.949158031088084e-06, "loss": 0.1119, "step": 1150},
    {"epoch": 0.695026962252846, "grad_norm": 5.099035263061523, "learning_rate": 8.932966321243523e-06, "loss": 0.1033, "step": 1160},
    {"epoch": 0.701018573996405, "grad_norm": 3.9480855464935303, "learning_rate": 8.916774611398964e-06, "loss": 0.0895, "step": 1170},
    {"epoch": 0.7070101857399641, "grad_norm": 4.888814449310303, "learning_rate": 8.900582901554405e-06, "loss": 0.107, "step": 1180},
    {"epoch": 0.7130017974835231, "grad_norm": 2.8887240886688232, "learning_rate": 8.884391191709846e-06, "loss": 0.0974, "step": 1190},
    {"epoch": 0.7189934092270821, "grad_norm": 4.195169448852539, "learning_rate": 8.868199481865287e-06, "loss": 0.1186, "step": 1200},
    {"epoch": 0.7249850209706411, "grad_norm": 4.0084710121154785, "learning_rate": 8.852007772020726e-06, "loss": 0.1081, "step": 1210},
    {"epoch": 0.7309766327142001, "grad_norm": 3.978888511657715, "learning_rate": 8.835816062176167e-06, "loss": 0.1271, "step": 1220},
    {"epoch": 0.7369682444577591, "grad_norm": 2.9603512287139893, "learning_rate": 8.819624352331608e-06, "loss": 0.1017, "step": 1230},
    {"epoch": 0.7429598562013181, "grad_norm": 3.808750629425049, "learning_rate": 8.803432642487047e-06, "loss": 0.1119, "step": 1240},
    {"epoch": 0.7489514679448772, "grad_norm": 4.731956481933594, "learning_rate": 8.787240932642488e-06, "loss": 0.1015, "step": 1250},
    {"epoch": 0.7549430796884362, "grad_norm": 4.4244866371154785, "learning_rate": 8.771049222797927e-06, "loss": 0.1009, "step": 1260},
    {"epoch": 0.7609346914319952, "grad_norm": 5.3592305183410645, "learning_rate": 8.754857512953368e-06, "loss": 0.1175, "step": 1270},
    {"epoch": 0.7669263031755542, "grad_norm": 3.1789183616638184, "learning_rate": 8.73866580310881e-06, "loss": 0.1079, "step": 1280},
    {"epoch": 0.7729179149191132, "grad_norm": 4.719349384307861, "learning_rate": 8.722474093264249e-06, "loss": 0.1034, "step": 1290},
    {"epoch": 0.7789095266626722, "grad_norm": 3.4696717262268066, "learning_rate": 8.70628238341969e-06, "loss": 0.1042, "step": 1300},
    {"epoch": 0.7849011384062313, "grad_norm": 3.1757519245147705, "learning_rate": 8.69009067357513e-06, "loss": 0.0919, "step": 1310},
    {"epoch": 0.7908927501497903, "grad_norm": 5.520872592926025, "learning_rate": 8.67389896373057e-06, "loss": 0.1011, "step": 1320},
    {"epoch": 0.7968843618933493, "grad_norm": 4.574864387512207, "learning_rate": 8.657707253886011e-06, "loss": 0.1123, "step": 1330},
    {"epoch": 0.8028759736369083, "grad_norm": 3.809239625930786, "learning_rate": 8.641515544041452e-06, "loss": 0.1056, "step": 1340},
    {"epoch": 0.8088675853804673, "grad_norm": 4.405616283416748, "learning_rate": 8.625323834196891e-06, "loss": 0.1141, "step": 1350},
    {"epoch": 0.8148591971240263, "grad_norm": 4.816131114959717, "learning_rate": 8.609132124352332e-06, "loss": 0.1042, "step": 1360},
    {"epoch": 0.8208508088675854, "grad_norm": 4.3509602546691895, "learning_rate": 8.592940414507773e-06, "loss": 0.098, "step": 1370},
    {"epoch": 0.8268424206111444, "grad_norm": 3.7490243911743164, "learning_rate": 8.576748704663214e-06, "loss": 0.1049, "step": 1380},
    {"epoch": 0.8328340323547034, "grad_norm": 5.330031394958496, "learning_rate": 8.560556994818653e-06, "loss": 0.105, "step": 1390},
    {"epoch": 0.8388256440982624, "grad_norm": 4.027228355407715, "learning_rate": 8.544365284974094e-06, "loss": 0.1249, "step": 1400},
    {"epoch": 0.8448172558418214, "grad_norm": 4.0217790603637695, "learning_rate": 8.528173575129535e-06, "loss": 0.1259, "step": 1410},
    {"epoch": 0.8508088675853804, "grad_norm": 4.059916973114014, "learning_rate": 8.511981865284975e-06, "loss": 0.1173, "step": 1420},
    {"epoch": 0.8568004793289394, "grad_norm": 4.621293544769287, "learning_rate": 8.495790155440416e-06, "loss": 0.1037, "step": 1430},
    {"epoch": 0.8627920910724985, "grad_norm": 3.6666407585144043, "learning_rate": 8.479598445595855e-06, "loss": 0.1177, "step": 1440},
    {"epoch": 0.8687837028160575, "grad_norm": 4.930758953094482, "learning_rate": 8.463406735751296e-06, "loss": 0.1055, "step": 1450},
    {"epoch": 0.8747753145596165, "grad_norm": 4.322690010070801, "learning_rate": 8.447215025906737e-06, "loss": 0.1052, "step": 1460},
    {"epoch": 0.8807669263031755, "grad_norm": 3.768627405166626, "learning_rate": 8.431023316062176e-06, "loss": 0.1099, "step": 1470},
    {"epoch": 0.8867585380467345, "grad_norm": 3.582303047180176, "learning_rate": 8.414831606217617e-06, "loss": 0.1061, "step": 1480},
    {"epoch": 0.8927501497902935, "grad_norm": 3.874319076538086, "learning_rate": 8.398639896373058e-06, "loss": 0.126, "step": 1490},
    {"epoch": 0.8987417615338527, "grad_norm": 4.040960311889648, "learning_rate": 8.382448186528497e-06, "loss": 0.089, "step": 1500},
    {"epoch": 0.9047333732774117, "grad_norm": 4.185450077056885, "learning_rate": 8.366256476683938e-06, "loss": 0.0955, "step": 1510},
    {"epoch": 0.9107249850209707, "grad_norm": 3.388444185256958, "learning_rate": 8.35006476683938e-06, "loss": 0.1123, "step": 1520},
    {"epoch": 0.9167165967645297, "grad_norm": 4.470240592956543, "learning_rate": 8.333873056994819e-06, "loss": 0.1213, "step": 1530},
    {"epoch": 0.9227082085080887, "grad_norm": 4.528209686279297, "learning_rate": 8.31768134715026e-06, "loss": 0.1152, "step": 1540},
    {"epoch": 0.9286998202516477, "grad_norm": 4.039801597595215, "learning_rate": 8.301489637305699e-06, "loss": 0.0907, "step": 1550},
    {"epoch": 0.9346914319952067, "grad_norm": 2.667508840560913, "learning_rate": 8.28529792746114e-06, "loss": 0.1103, "step": 1560},
    {"epoch": 0.9406830437387658, "grad_norm": 4.166356086730957, "learning_rate": 8.269106217616581e-06, "loss": 0.1034, "step": 1570},
    {"epoch": 0.9466746554823248, "grad_norm": 4.047358989715576, "learning_rate": 8.252914507772022e-06, "loss": 0.1086, "step": 1580},
    {"epoch": 0.9526662672258838, "grad_norm": 3.4767513275146484, "learning_rate": 8.236722797927463e-06, "loss": 0.0982, "step": 1590},
    {"epoch": 0.9586578789694428, "grad_norm": 4.475752830505371, "learning_rate": 8.220531088082902e-06, "loss": 0.0879, "step": 1600},
    {"epoch": 0.9646494907130018, "grad_norm": 3.3900721073150635, "learning_rate": 8.204339378238343e-06, "loss": 0.0861, "step": 1610},
    {"epoch": 0.9706411024565608, "grad_norm": 3.077188014984131, "learning_rate": 8.188147668393784e-06, "loss": 0.1024, "step": 1620},
    {"epoch": 0.9766327142001199, "grad_norm": 4.166396141052246, "learning_rate": 8.171955958549223e-06, "loss": 0.0942, "step": 1630},
    {"epoch": 0.9826243259436789, "grad_norm": 2.931767463684082, "learning_rate": 8.155764248704664e-06, "loss": 0.0833, "step": 1640},
    {"epoch": 0.9886159376872379, "grad_norm": 3.9457390308380127, "learning_rate": 8.139572538860104e-06, "loss": 0.108, "step": 1650},
    {"epoch": 0.9946075494307969, "grad_norm": 4.256587505340576, "learning_rate": 8.123380829015545e-06, "loss": 0.0926, "step": 1660},
    {"epoch": 1.0005991611743559, "grad_norm": 1.9296483993530273, "learning_rate": 8.107189119170986e-06, "loss": 0.0814, "step": 1670},
    {"epoch": 1.0065907729179149, "grad_norm": 2.8497297763824463, "learning_rate": 8.090997409326425e-06, "loss": 0.0399, "step": 1680},
    {"epoch": 1.0125823846614739, "grad_norm": 1.6683580875396729, "learning_rate": 8.074805699481866e-06, "loss": 0.0465, "step": 1690},
    {"epoch": 1.0185739964050329, "grad_norm": 2.067464828491211, "learning_rate": 8.058613989637307e-06, "loss": 0.0388, "step": 1700},
    {"epoch": 1.0245656081485919, "grad_norm": 3.085991382598877, "learning_rate": 8.042422279792746e-06, "loss": 0.0389, "step": 1710},
    {"epoch": 1.030557219892151, "grad_norm": 2.972982883453369, "learning_rate": 8.026230569948187e-06, "loss": 0.0432, "step": 1720},
    {"epoch": 1.03654883163571, "grad_norm": 3.642639398574829, "learning_rate": 8.010038860103628e-06, "loss": 0.0474, "step": 1730},
    {"epoch": 1.042540443379269, "grad_norm": 3.922886848449707, "learning_rate": 7.993847150259067e-06, "loss": 0.0362, "step": 1740},
    {"epoch": 1.048532055122828, "grad_norm": 2.143171787261963, "learning_rate": 7.977655440414508e-06, "loss": 0.0453, "step": 1750},
    {"epoch": 1.054523666866387, "grad_norm": 2.2521893978118896, "learning_rate": 7.96146373056995e-06, "loss": 0.0347, "step": 1760},
    {"epoch": 1.060515278609946, "grad_norm": 2.5530147552490234, "learning_rate": 7.94527202072539e-06, "loss": 0.0321, "step": 1770},
    {"epoch": 1.066506890353505, "grad_norm": 1.3185819387435913, "learning_rate": 7.92908031088083e-06, "loss": 0.0352, "step": 1780},
    {"epoch": 1.072498502097064, "grad_norm": 4.018706321716309, "learning_rate": 7.91288860103627e-06, "loss": 0.0528, "step": 1790},
    {"epoch": 1.078490113840623, "grad_norm": 2.9235482215881348, "learning_rate": 7.896696891191711e-06, "loss": 0.0291, "step": 1800},
    {"epoch": 1.084481725584182, "grad_norm": 3.683203935623169, "learning_rate": 7.88050518134715e-06, "loss": 0.0398, "step": 1810},
    {"epoch": 1.090473337327741, "grad_norm": 7.694185256958008, "learning_rate": 7.864313471502592e-06, "loss": 0.0428, "step": 1820},
    {"epoch": 1.0964649490713, "grad_norm": 2.377105474472046, "learning_rate": 7.848121761658031e-06, "loss": 0.0474, "step": 1830},
    {"epoch": 1.102456560814859, "grad_norm": 2.613847494125366, "learning_rate": 7.831930051813472e-06, "loss": 0.0346, "step": 1840},
    {"epoch": 1.1084481725584183, "grad_norm": 2.805191993713379, "learning_rate": 7.815738341968913e-06, "loss": 0.043, "step": 1850},
    {"epoch": 1.1144397843019773, "grad_norm": 3.0600552558898926, "learning_rate": 7.799546632124352e-06, "loss": 0.0422, "step": 1860},
    {"epoch": 1.1204313960455363, "grad_norm": 1.6137948036193848, "learning_rate": 7.783354922279793e-06, "loss": 0.036, "step": 1870},
    {"epoch": 1.1264230077890953, "grad_norm": 1.492092251777649, "learning_rate": 7.767163212435234e-06, "loss": 0.0315, "step": 1880},
    {"epoch": 1.1324146195326543, "grad_norm": 2.410632610321045, "learning_rate": 7.750971502590674e-06, "loss": 0.0361, "step": 1890},
    {"epoch": 1.1384062312762133, "grad_norm": 2.9959256649017334, "learning_rate": 7.734779792746114e-06, "loss": 0.0248, "step": 1900},
    {"epoch": 1.1443978430197723, "grad_norm": 1.860141634941101, "learning_rate": 7.718588082901555e-06, "loss": 0.0337, "step": 1910},
    {"epoch": 1.1503894547633313, "grad_norm": 3.2396111488342285, "learning_rate": 7.702396373056995e-06, "loss": 0.0319, "step": 1920},
    {"epoch": 1.1563810665068903, "grad_norm": 3.930166721343994, "learning_rate": 7.686204663212436e-06, "loss": 0.0427, "step": 1930},
    {"epoch": 1.1623726782504493, "grad_norm": 2.2750892639160156, "learning_rate": 7.670012953367875e-06, "loss": 0.0417, "step": 1940},
    {"epoch": 1.1683642899940083, "grad_norm": 3.2963342666625977, "learning_rate": 7.653821243523318e-06, "loss": 0.0415, "step": 1950},
    {"epoch": 1.1743559017375673, "grad_norm": 2.719001531600952, "learning_rate": 7.637629533678757e-06, "loss": 0.0308, "step": 1960},
    {"epoch": 1.1803475134811263, "grad_norm": 3.191821813583374, "learning_rate": 7.621437823834198e-06, "loss": 0.0323, "step": 1970},
    {"epoch": 1.1863391252246855, "grad_norm": 2.4749386310577393, "learning_rate": 7.605246113989638e-06, "loss": 0.035, "step": 1980},
    {"epoch": 1.1923307369682445, "grad_norm": 2.652811050415039, "learning_rate": 7.589054404145079e-06, "loss": 0.0456, "step": 1990},
    {"epoch": 1.1983223487118035, "grad_norm": 2.6856987476348877, "learning_rate": 7.572862694300519e-06, "loss": 0.0313, "step": 2000},
    {"epoch": 1.1983223487118035, "eval_loss": 0.04815296828746796, "eval_runtime": 4874.046, "eval_samples_per_second": 2.739, "eval_steps_per_second": 0.342, "eval_wer": 3.9869195765987637, "step": 2000},
    {"epoch": 1.2043139604553625, "grad_norm": 2.2129974365234375, "learning_rate": 7.556670984455959e-06, "loss": 0.0343, "step": 2010},
    {"epoch": 1.2103055721989215, "grad_norm": 2.4016990661621094, "learning_rate": 7.5404792746113994e-06, "loss": 0.043, "step": 2020},
    {"epoch": 1.2162971839424805, "grad_norm": 2.5401735305786133, "learning_rate": 7.52428756476684e-06, "loss": 0.0452, "step": 2030},
    {"epoch": 1.2222887956860395, "grad_norm": 2.0397777557373047, "learning_rate": 7.5080958549222805e-06, "loss": 0.039, "step": 2040},
    {"epoch": 1.2282804074295985, "grad_norm": 3.3085644245147705, "learning_rate": 7.491904145077721e-06, "loss": 0.043, "step": 2050},
    {"epoch": 1.2342720191731575, "grad_norm": 1.1905828714370728, "learning_rate": 7.475712435233161e-06, "loss": 0.0406, "step": 2060},
    {"epoch": 1.2402636309167165, "grad_norm": 4.510779857635498, "learning_rate": 7.459520725388602e-06, "loss": 0.0494, "step": 2070},
    {"epoch": 1.2462552426602755, "grad_norm": 2.616729497909546, "learning_rate": 7.443329015544042e-06, "loss": 0.0436, "step": 2080},
    {"epoch": 1.2522468544038348, "grad_norm": 2.109017848968506, "learning_rate": 7.427137305699482e-06, "loss": 0.042, "step": 2090},
    {"epoch": 1.2582384661473935, "grad_norm": 2.5082507133483887, "learning_rate": 7.410945595854922e-06, "loss": 0.0307, "step": 2100},
    {"epoch": 1.2642300778909528, "grad_norm": 2.2173380851745605, "learning_rate": 7.394753886010363e-06, "loss": 0.052, "step": 2110},
    {"epoch": 1.2702216896345118, "grad_norm": 2.330674171447754, "learning_rate": 7.378562176165803e-06, "loss": 0.0357, "step": 2120},
    {"epoch": 1.2762133013780708, "grad_norm": 2.3817524909973145, "learning_rate": 7.362370466321243e-06, "loss": 0.0443, "step": 2130},
    {"epoch": 1.2822049131216298, "grad_norm": 1.9407250881195068, "learning_rate": 7.346178756476684e-06, "loss": 0.0469, "step": 2140},
    {"epoch": 1.2881965248651888, "grad_norm": 1.5077818632125854, "learning_rate": 7.329987046632125e-06, "loss": 0.0351, "step": 2150},
    {"epoch": 1.2941881366087478, "grad_norm": 1.8476344347000122, "learning_rate": 7.3137953367875655e-06, "loss": 0.0435, "step": 2160},
    {"epoch": 1.3001797483523068, "grad_norm": 1.7587623596191406, "learning_rate": 7.2976036269430065e-06, "loss": 0.0381, "step": 2170},
    {"epoch": 1.3061713600958658, "grad_norm": 3.003218173980713, "learning_rate": 7.281411917098447e-06, "loss": 0.0306, "step": 2180},
    {"epoch": 1.3121629718394248, "grad_norm": 2.0112128257751465, "learning_rate": 7.265220207253887e-06, "loss": 0.0401, "step": 2190},
    {"epoch": 1.3181545835829838, "grad_norm": 2.3190035820007324, "learning_rate": 7.249028497409327e-06, "loss": 0.0457, "step": 2200},
    {"epoch": 1.3241461953265428, "grad_norm": 2.1521100997924805, "learning_rate": 7.232836787564768e-06, "loss": 0.0403, "step": 2210},
    {"epoch": 1.330137807070102, "grad_norm": 2.4302561283111572, "learning_rate": 7.216645077720208e-06, "loss": 0.04, "step": 2220},
    {"epoch": 1.3361294188136608, "grad_norm": 3.34147047996521, "learning_rate": 7.200453367875648e-06, "loss": 0.0379, "step": 2230},
    {"epoch": 1.34212103055722, "grad_norm": 2.458252191543579, "learning_rate": 7.184261658031088e-06, "loss": 0.0464, "step": 2240},
    {"epoch": 1.348112642300779, "grad_norm": 3.149097204208374, "learning_rate": 7.168069948186529e-06, "loss": 0.0395, "step": 2250},
    {"epoch": 1.354104254044338, "grad_norm": 3.174875020980835, "learning_rate": 7.151878238341969e-06, "loss": 0.036, "step": 2260},
    {"epoch": 1.360095865787897, "grad_norm": 2.373333215713501, "learning_rate": 7.1356865284974095e-06, "loss": 0.042, "step": 2270},
    {"epoch": 1.366087477531456, "grad_norm": 2.6968131065368652, "learning_rate": 7.1194948186528505e-06, "loss": 0.0377, "step": 2280},
    {"epoch": 1.372079089275015, "grad_norm": 1.8244385719299316, "learning_rate": 7.103303108808291e-06, "loss": 0.0373, "step": 2290},
    {"epoch": 1.378070701018574, "grad_norm": 2.684811592102051, "learning_rate": 7.087111398963731e-06, "loss": 0.0514, "step": 2300},
    {"epoch": 1.384062312762133, "grad_norm": 2.1189537048339844, "learning_rate": 7.070919689119171e-06, "loss": 0.0327, "step": 2310},
    {"epoch": 1.390053924505692, "grad_norm": 2.272768497467041, "learning_rate": 7.054727979274612e-06, "loss": 0.0405, "step": 2320},
    {"epoch": 1.396045536249251, "grad_norm": 2.2334699630737305, "learning_rate": 7.038536269430052e-06, "loss": 0.0425, "step": 2330},
    {"epoch": 1.40203714799281, "grad_norm": 2.5346498489379883, "learning_rate": 7.022344559585493e-06, "loss": 0.0343, "step": 2340},
    {"epoch": 1.4080287597363692, "grad_norm": 2.477675676345825, "learning_rate": 7.006152849740934e-06, "loss": 0.0333, "step": 2350},
    {"epoch": 1.414020371479928, "grad_norm": 3.833782911300659, "learning_rate": 6.989961139896374e-06, "loss": 0.0482, "step": 2360},
    {"epoch": 1.4200119832234872, "grad_norm": 2.571751117706299, "learning_rate": 6.973769430051814e-06, "loss": 0.0421, "step": 2370},
    {"epoch": 1.4260035949670462, "grad_norm": 2.7097127437591553, "learning_rate": 6.957577720207255e-06, "loss": 0.0447, "step": 2380},
    {"epoch": 1.4319952067106052, "grad_norm": 1.97878098487854, "learning_rate": 6.941386010362695e-06, "loss": 0.0445, "step": 2390},
    {"epoch": 1.4379868184541642, "grad_norm": 2.1338934898376465, "learning_rate": 6.925194300518135e-06, "loss": 0.0484, "step": 2400},
    {"epoch": 1.4439784301977232, "grad_norm": 2.3054728507995605, "learning_rate": 6.9090025906735755e-06, "loss": 0.0327, "step": 2410},
    {"epoch": 1.4499700419412822, "grad_norm": 3.713162422180176, "learning_rate": 6.8928108808290165e-06, "loss": 0.0388, "step": 2420},
    {"epoch": 1.4559616536848412, "grad_norm": 2.2444119453430176, "learning_rate": 6.876619170984457e-06, "loss": 0.0364, "step": 2430},
    {"epoch": 1.4619532654284002, "grad_norm": 2.1193532943725586, "learning_rate": 6.860427461139897e-06, "loss": 0.0439, "step": 2440},
    {"epoch": 1.4679448771719592, "grad_norm": 2.278380870819092, "learning_rate": 6.844235751295337e-06, "loss": 0.0516, "step": 2450},
    {"epoch": 1.4739364889155182, "grad_norm": 2.936154365539551, "learning_rate": 6.828044041450778e-06, "loss": 0.0386, "step": 2460},
    {"epoch": 1.4799281006590772, "grad_norm": 3.983410358428955, "learning_rate": 6.811852331606218e-06, "loss": 0.0493, "step": 2470},
    {"epoch": 1.4859197124026364, "grad_norm": 3.1823031902313232, "learning_rate": 6.795660621761658e-06, "loss": 0.0339, "step": 2480},
    {"epoch": 1.4919113241461952, "grad_norm": 3.02040696144104, "learning_rate": 6.779468911917098e-06, "loss": 0.0377, "step": 2490},
    {"epoch": 1.4979029358897544, "grad_norm": 2.253005266189575, "learning_rate": 6.763277202072539e-06, "loss": 0.0385, "step": 2500},
    {"epoch": 1.5038945476333132, "grad_norm": 2.6622331142425537, "learning_rate": 6.747085492227979e-06, "loss": 0.0388, "step": 2510},
    {"epoch": 1.5098861593768724, "grad_norm": 2.394500732421875, "learning_rate": 6.7308937823834195e-06, "loss": 0.037, "step": 2520},
    {"epoch": 1.5158777711204314, "grad_norm": 1.885865330696106, "learning_rate": 6.714702072538861e-06, "loss": 0.0314, "step": 2530},
    {"epoch": 1.5218693828639904, "grad_norm": 2.989285469055176, "learning_rate": 6.6985103626943015e-06, "loss": 0.038, "step": 2540},
    {"epoch": 1.5278609946075494, "grad_norm": 3.051121950149536, "learning_rate": 6.682318652849742e-06, "loss": 0.0438, "step": 2550},
    {"epoch": 1.5338526063511084, "grad_norm": 3.2008793354034424, "learning_rate": 6.666126943005183e-06, "loss": 0.0391, "step": 2560},
    {"epoch": 1.5398442180946674, "grad_norm": 2.4947457313537598, "learning_rate": 6.649935233160623e-06, "loss": 0.0369, "step": 2570},
    {"epoch": 1.5458358298382264, "grad_norm": 2.38167405128479, "learning_rate": 6.633743523316063e-06, "loss": 0.0443, "step": 2580},
    {"epoch": 1.5518274415817856, "grad_norm": 2.149912118911743, "learning_rate": 6.617551813471503e-06, "loss": 0.0414, "step": 2590},
    {"epoch": 1.5578190533253444, "grad_norm": 1.8637117147445679, "learning_rate": 6.601360103626944e-06, "loss": 0.0321, "step": 2600},
    {"epoch": 1.5638106650689036, "grad_norm": 4.388784408569336, "learning_rate": 6.585168393782384e-06, "loss": 0.0443, "step": 2610},
    {"epoch": 1.5698022768124624, "grad_norm": 3.057321548461914, "learning_rate": 6.568976683937824e-06, "loss": 0.0358, "step": 2620},
    {"epoch": 1.5757938885560216, "grad_norm": 1.5130738019943237, "learning_rate": 6.552784974093264e-06, "loss": 0.0341, "step": 2630},
    {"epoch": 1.5817855002995804, "grad_norm": 2.506476879119873, "learning_rate": 6.536593264248705e-06, "loss": 0.0369, "step": 2640},
    {"epoch": 1.5877771120431396, "grad_norm": 1.917100429534912, "learning_rate": 6.5204015544041455e-06, "loss": 0.0436, "step": 2650},
    {"epoch": 1.5937687237866986, "grad_norm": 2.6610848903656006, "learning_rate": 6.504209844559586e-06, "loss": 0.0371, "step": 2660},
    {"epoch": 1.5997603355302576, "grad_norm": 2.3574564456939697, "learning_rate": 6.4880181347150266e-06, "loss": 0.0466, "step": 2670},
    {"epoch": 1.6057519472738166, "grad_norm": 3.3509860038757324, "learning_rate": 6.471826424870467e-06, "loss": 0.0351, "step": 2680},
    {"epoch": 1.6117435590173756, "grad_norm": 2.6591145992279053, "learning_rate": 6.455634715025907e-06, "loss": 0.0445, "step": 2690},
    {"epoch": 1.6177351707609346, "grad_norm": 3.161813735961914, "learning_rate": 6.439443005181347e-06, "loss": 0.0315, "step": 2700},
    {"epoch": 1.6237267825044936, "grad_norm": 2.351088047027588, "learning_rate": 6.423251295336788e-06, "loss": 0.0428, "step": 2710},
    {"epoch": 1.6297183942480529, "grad_norm": 2.1950671672821045, "learning_rate": 6.407059585492228e-06, "loss": 0.0322, "step": 2720},
    {"epoch": 1.6357100059916116, "grad_norm": 3.3831064701080322, "learning_rate": 6.390867875647669e-06, "loss": 0.0465, "step": 2730},
    {"epoch": 1.6417016177351709, "grad_norm": 3.514268636703491, "learning_rate": 6.37467616580311e-06, "loss": 0.0458, "step": 2740},
    {"epoch": 1.6476932294787296, "grad_norm": 1.8238720893859863, "learning_rate": 6.35848445595855e-06, "loss": 0.0398, "step": 2750},
    {"epoch": 1.6536848412222889, "grad_norm": 2.247701644897461, "learning_rate": 6.34229274611399e-06, "loss": 0.0617, "step": 2760},
    {"epoch": 1.6596764529658476, "grad_norm": 2.080018997192383, "learning_rate": 6.326101036269431e-06, "loss": 0.0434, "step": 2770},
    {"epoch": 1.6656680647094069, "grad_norm": 2.6713249683380127, "learning_rate": 6.309909326424871e-06, "loss": 0.0353, "step": 2780},
    {"epoch": 1.6716596764529659, "grad_norm": 2.1419765949249268, "learning_rate": 6.2937176165803115e-06, "loss": 0.0432, "step": 2790},
    {"epoch": 1.6776512881965249, "grad_norm": 2.145838737487793, "learning_rate": 6.277525906735752e-06, "loss": 0.0489, "step": 2800},
    {"epoch": 1.6836428999400839, "grad_norm": 3.2524642944335938, "learning_rate": 6.261334196891193e-06, "loss": 0.0401, "step": 2810},
    {"epoch": 1.6896345116836429, "grad_norm": 2.0695011615753174, "learning_rate": 6.245142487046633e-06, "loss": 0.0387, "step": 2820},
    {"epoch": 1.6956261234272019, "grad_norm": 2.829993963241577, "learning_rate": 6.228950777202073e-06, "loss": 0.0307, "step": 2830},
    {"epoch": 1.7016177351707609, "grad_norm": 2.6294639110565186, "learning_rate": 6.212759067357513e-06, "loss": 0.0478, "step": 2840},
    {"epoch": 1.70760934691432, "grad_norm": 3.4384214878082275, "learning_rate": 6.196567357512954e-06, "loss": 0.0433, "step": 2850},
    {"epoch": 1.7136009586578789, "grad_norm": 2.4208130836486816, "learning_rate": 6.180375647668394e-06, "loss": 0.0441, "step": 2860},
    {"epoch": 1.719592570401438, "grad_norm": 2.825674533843994, "learning_rate": 6.164183937823834e-06, "loss": 0.0516, "step": 2870},
    {"epoch": 1.7255841821449969, "grad_norm": 2.239126682281494, "learning_rate": 6.147992227979274e-06, "loss": 0.0318, "step": 2880},
    {"epoch": 1.731575793888556, "grad_norm": 2.4009127616882324, "learning_rate": 6.131800518134715e-06, "loss": 0.0358, "step": 2890},
    {"epoch": 1.737567405632115, "grad_norm": 1.753083348274231, "learning_rate": 6.1156088082901555e-06, "loss": 0.035, "step": 2900},
    {"epoch": 1.743559017375674, "grad_norm": 1.7808746099472046, "learning_rate": 6.099417098445596e-06, "loss": 0.038, "step": 2910},
    {"epoch": 1.749550629119233, "grad_norm": 2.457718849182129, "learning_rate": 6.0832253886010375e-06, "loss": 0.0339, "step": 2920},
    {"epoch": 1.755542240862792, "grad_norm": 2.7318003177642822, "learning_rate": 6.067033678756478e-06, "loss": 0.0386, "step": 2930},
    {"epoch": 1.761533852606351, "grad_norm": 2.1199822425842285, "learning_rate": 6.050841968911918e-06, "loss": 0.0357, "step": 2940},
    {"epoch": 1.76752546434991, "grad_norm": 3.083209753036499, "learning_rate": 6.034650259067359e-06, "loss": 0.0401, "step": 2950},
    {"epoch": 1.773517076093469, "grad_norm": 2.8726754188537598, "learning_rate": 6.018458549222799e-06, "loss": 0.047, "step": 2960},
    {"epoch": 1.779508687837028, "grad_norm": 2.853578805923462, "learning_rate": 6.002266839378239e-06, "loss": 0.0367, "step": 2970},
    {"epoch": 1.7855002995805873, "grad_norm": 1.9339247941970825, "learning_rate": 5.986075129533679e-06, "loss": 0.0324, "step": 2980},
    {"epoch": 1.791491911324146, "grad_norm": 2.1655240058898926, "learning_rate": 5.96988341968912e-06, "loss": 0.0345, "step": 2990},
    {"epoch": 1.7974835230677053, "grad_norm": 1.8844058513641357, "learning_rate": 5.95369170984456e-06, "loss": 0.0489, "step": 3000},
    {"epoch": 1.803475134811264, "grad_norm": 1.8927345275878906, "learning_rate": 5.9375e-06, "loss": 0.0392, "step": 3010},
    {"epoch": 1.8094667465548233, "grad_norm": 2.797908306121826, "learning_rate": 5.9213082901554405e-06, "loss": 0.0429, "step": 3020},
    {"epoch": 1.8154583582983823, "grad_norm": 2.138853073120117, "learning_rate": 5.9051165803108814e-06, "loss": 0.0366, "step": 3030},
    {"epoch": 1.8214499700419413, "grad_norm": 2.9621222019195557, "learning_rate": 5.8889248704663216e-06, "loss": 0.0364, "step": 3040},
    {"epoch": 1.8274415817855003, "grad_norm": 1.9806824922561646, "learning_rate": 5.872733160621762e-06, "loss": 0.0292, "step": 3050},
    {"epoch": 1.8334331935290593, "grad_norm": 1.8473615646362305, "learning_rate": 5.856541450777203e-06, "loss": 0.0315, "step": 3060},
    {"epoch": 1.8394248052726183, "grad_norm": 2.8493854999542236, "learning_rate": 5.840349740932643e-06, "loss": 0.0329, "step": 3070},
    {"epoch": 1.8454164170161773, "grad_norm": 4.222199440002441, "learning_rate": 5.824158031088083e-06, "loss": 0.0417, "step": 3080},
    {"epoch": 1.8514080287597365, "grad_norm": 2.80318546295166, "learning_rate": 5.807966321243523e-06, "loss": 0.0453, "step": 3090},
    {"epoch": 1.8573996405032953, "grad_norm": 3.1252284049987793, "learning_rate": 5.791774611398964e-06, "loss": 0.044, "step": 3100},
    {"epoch": 1.8633912522468545, "grad_norm": 2.628920555114746, "learning_rate": 5.775582901554405e-06, "loss": 0.0388, "step": 3110},
    {"epoch": 1.8693828639904133, "grad_norm": 2.189281940460205, "learning_rate": 5.759391191709845e-06, "loss": 0.0378, "step": 3120},
    {"epoch": 1.8753744757339725, "grad_norm": 2.4642791748046875, "learning_rate": 5.743199481865286e-06, "loss": 0.0418, "step": 3130},
    {"epoch": 1.8813660874775313, "grad_norm": 2.589334726333618, "learning_rate": 5.727007772020726e-06, "loss": 0.0367, "step": 3140},
    {"epoch": 1.8873576992210905, "grad_norm": 3.040709972381592, "learning_rate": 5.710816062176166e-06, "loss": 0.035, "step": 3150},
    {"epoch": 1.8933493109646495, "grad_norm": 2.8135108947753906, "learning_rate": 5.6946243523316065e-06, "loss": 0.0435, "step": 3160},
    {"epoch": 1.8993409227082085, "grad_norm": 1.8438613414764404, "learning_rate": 5.6784326424870475e-06, "loss": 0.0416, "step": 3170},
    {"epoch": 1.9053325344517675, "grad_norm": 2.7345640659332275, "learning_rate": 5.662240932642488e-06, "loss": 0.0367, "step": 3180},
    {"epoch": 1.9113241461953265, "grad_norm": 2.2093093395233154, "learning_rate": 5.646049222797928e-06, "loss": 0.0486, "step": 3190},
    {"epoch": 1.9173157579388855, "grad_norm": 2.6385855674743652, "learning_rate": 5.629857512953369e-06, "loss": 0.037, "step": 3200},
    {"epoch": 1.9233073696824445, "grad_norm": 4.1110124588012695, "learning_rate": 5.613665803108809e-06, "loss": 0.0418, "step": 3210},
    {"epoch": 1.9292989814260038, "grad_norm": 2.6954362392425537, "learning_rate": 5.597474093264249e-06, "loss": 0.0406, "step": 3220},
    {"epoch": 1.9352905931695625, "grad_norm": 3.4439690113067627, "learning_rate": 5.581282383419689e-06, "loss": 0.0373, "step": 3230},
    {"epoch": 1.9412822049131218, "grad_norm": 2.6216893196105957, "learning_rate": 5.56509067357513e-06, "loss": 0.0319, "step": 3240},
    {"epoch": 1.9472738166566805, "grad_norm": 2.924895763397217, "learning_rate": 5.54889896373057e-06, "loss": 0.0407, "step": 3250},
    {"epoch": 1.9532654284002398, "grad_norm": 2.488950729370117, "learning_rate": 5.53270725388601e-06, "loss": 0.0341, "step": 3260},
    {"epoch": 1.9592570401437985, "grad_norm": 2.4867169857025146, "learning_rate": 5.5165155440414505e-06, "loss": 0.0453, "step": 3270},
    {"epoch": 1.9652486518873578, "grad_norm": 5.1045427322387695, "learning_rate": 5.5003238341968915e-06, "loss": 0.0429, "step": 3280},
    {"epoch": 1.9712402636309168, "grad_norm": 2.7689225673675537, "learning_rate": 5.484132124352332e-06, "loss": 0.0309, "step": 3290},
    {"epoch": 1.9772318753744758, "grad_norm": 1.944758415222168, "learning_rate": 5.4679404145077734e-06, "loss": 0.0357, "step": 3300},
    {"epoch": 1.9832234871180348, "grad_norm": 2.512436628341675, "learning_rate": 5.4517487046632136e-06, "loss": 0.0436, "step": 3310},
    {"epoch": 1.9892150988615938, "grad_norm": 2.7214131355285645, "learning_rate": 5.435556994818654e-06, "loss": 0.0329, "step": 3320},
    {"epoch": 1.9952067106051528, "grad_norm": 2.3871612548828125, "learning_rate": 5.419365284974094e-06, "loss": 0.0368, "step": 3330},
    {"epoch": 2.0011983223487118, "grad_norm": 1.4264804124832153, "learning_rate": 5.403173575129535e-06, "loss": 0.0283, "step": 3340},
    {"epoch": 2.007189934092271, "grad_norm": 0.9049192070960999, "learning_rate": 5.386981865284975e-06, "loss": 0.0141, "step": 3350},
    {"epoch": 2.0131815458358298, "grad_norm": 1.757786512374878, "learning_rate": 5.370790155440415e-06, "loss": 0.0092, "step": 3360},
    {"epoch": 2.019173157579389, "grad_norm": 0.922551691532135, "learning_rate": 5.354598445595855e-06, "loss": 0.0108, "step": 3370},
    {"epoch": 2.0251647693229478, "grad_norm": 1.6763967275619507, "learning_rate": 5.338406735751296e-06, "loss": 0.0081, "step": 3380},
    {"epoch": 2.031156381066507, "grad_norm": 1.023136854171753, "learning_rate": 5.322215025906736e-06, "loss": 0.0106, "step": 3390},
    {"epoch": 2.0371479928100658, "grad_norm": 1.0758904218673706, "learning_rate": 5.3060233160621764e-06, "loss": 0.0127, "step": 3400
| }, | |
| { | |
| "epoch": 2.043139604553625, | |
| "grad_norm": 0.85313481092453, | |
| "learning_rate": 5.2898316062176166e-06, | |
| "loss": 0.0112, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 2.0491312162971838, | |
| "grad_norm": 1.0233789682388306, | |
| "learning_rate": 5.2736398963730575e-06, | |
| "loss": 0.0115, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 2.055122828040743, | |
| "grad_norm": 1.594497561454773, | |
| "learning_rate": 5.257448186528498e-06, | |
| "loss": 0.0106, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 2.061114439784302, | |
| "grad_norm": 2.3627138137817383, | |
| "learning_rate": 5.241256476683938e-06, | |
| "loss": 0.0116, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 2.067106051527861, | |
| "grad_norm": 0.5149250030517578, | |
| "learning_rate": 5.225064766839378e-06, | |
| "loss": 0.0171, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 2.07309766327142, | |
| "grad_norm": 0.6787377595901489, | |
| "learning_rate": 5.208873056994819e-06, | |
| "loss": 0.0129, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 2.079089275014979, | |
| "grad_norm": 0.44210371375083923, | |
| "learning_rate": 5.192681347150259e-06, | |
| "loss": 0.0079, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 2.085080886758538, | |
| "grad_norm": 1.6856039762496948, | |
| "learning_rate": 5.176489637305699e-06, | |
| "loss": 0.0136, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 2.091072498502097, | |
| "grad_norm": 1.147712230682373, | |
| "learning_rate": 5.16029792746114e-06, | |
| "loss": 0.0084, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 2.097064110245656, | |
| "grad_norm": 0.7007279396057129, | |
| "learning_rate": 5.144106217616581e-06, | |
| "loss": 0.0143, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 2.103055721989215, | |
| "grad_norm": 1.0783849954605103, | |
| "learning_rate": 5.127914507772021e-06, | |
| "loss": 0.015, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 2.109047333732774, | |
| "grad_norm": 1.133612036705017, | |
| "learning_rate": 5.111722797927462e-06, | |
| "loss": 0.0117, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 2.115038945476333, | |
| "grad_norm": 0.5530627965927124, | |
| "learning_rate": 5.095531088082902e-06, | |
| "loss": 0.0098, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 2.121030557219892, | |
| "grad_norm": 1.3948954343795776, | |
| "learning_rate": 5.0793393782383425e-06, | |
| "loss": 0.009, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 2.127022168963451, | |
| "grad_norm": 1.2842837572097778, | |
| "learning_rate": 5.063147668393783e-06, | |
| "loss": 0.0117, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 2.13301378070701, | |
| "grad_norm": 0.8959969878196716, | |
| "learning_rate": 5.046955958549224e-06, | |
| "loss": 0.0063, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 2.139005392450569, | |
| "grad_norm": 0.8143625855445862, | |
| "learning_rate": 5.030764248704664e-06, | |
| "loss": 0.0087, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 2.144997004194128, | |
| "grad_norm": 1.388192057609558, | |
| "learning_rate": 5.014572538860104e-06, | |
| "loss": 0.0148, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.1509886159376874, | |
| "grad_norm": 1.5829776525497437, | |
| "learning_rate": 4.998380829015545e-06, | |
| "loss": 0.0129, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.156980227681246, | |
| "grad_norm": 1.3284603357315063, | |
| "learning_rate": 4.982189119170985e-06, | |
| "loss": 0.0161, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.1629718394248054, | |
| "grad_norm": 0.4669111371040344, | |
| "learning_rate": 4.965997409326425e-06, | |
| "loss": 0.0093, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.168963451168364, | |
| "grad_norm": 2.0282106399536133, | |
| "learning_rate": 4.949805699481865e-06, | |
| "loss": 0.0107, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.1749550629119234, | |
| "grad_norm": 1.9077093601226807, | |
| "learning_rate": 4.933613989637306e-06, | |
| "loss": 0.0099, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.180946674655482, | |
| "grad_norm": 0.4548993408679962, | |
| "learning_rate": 4.917422279792747e-06, | |
| "loss": 0.0086, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.1869382863990414, | |
| "grad_norm": 1.3280978202819824, | |
| "learning_rate": 4.901230569948187e-06, | |
| "loss": 0.0119, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.1929298981426, | |
| "grad_norm": 0.618032693862915, | |
| "learning_rate": 4.8850388601036275e-06, | |
| "loss": 0.01, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.1989215098861594, | |
| "grad_norm": 1.7678560018539429, | |
| "learning_rate": 4.868847150259068e-06, | |
| "loss": 0.0163, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.204913121629718, | |
| "grad_norm": 1.3157169818878174, | |
| "learning_rate": 4.8526554404145086e-06, | |
| "loss": 0.0117, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.2109047333732774, | |
| "grad_norm": 2.100522518157959, | |
| "learning_rate": 4.836463730569949e-06, | |
| "loss": 0.0082, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.2168963451168366, | |
| "grad_norm": 1.703456163406372, | |
| "learning_rate": 4.820272020725389e-06, | |
| "loss": 0.0082, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.2228879568603954, | |
| "grad_norm": 1.569329023361206, | |
| "learning_rate": 4.804080310880829e-06, | |
| "loss": 0.0119, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.2288795686039546, | |
| "grad_norm": 1.2286568880081177, | |
| "learning_rate": 4.78788860103627e-06, | |
| "loss": 0.0109, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.2348711803475134, | |
| "grad_norm": 1.1602758169174194, | |
| "learning_rate": 4.77169689119171e-06, | |
| "loss": 0.0088, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.2408627920910726, | |
| "grad_norm": 1.0114362239837646, | |
| "learning_rate": 4.755505181347151e-06, | |
| "loss": 0.0085, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.2468544038346314, | |
| "grad_norm": 1.3842345476150513, | |
| "learning_rate": 4.739313471502591e-06, | |
| "loss": 0.0179, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.2528460155781906, | |
| "grad_norm": 1.3407899141311646, | |
| "learning_rate": 4.723121761658031e-06, | |
| "loss": 0.0119, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.2588376273217494, | |
| "grad_norm": 1.4571481943130493, | |
| "learning_rate": 4.706930051813472e-06, | |
| "loss": 0.0093, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.2648292390653086, | |
| "grad_norm": 0.9713364243507385, | |
| "learning_rate": 4.690738341968912e-06, | |
| "loss": 0.0098, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.2708208508088674, | |
| "grad_norm": 1.1515699625015259, | |
| "learning_rate": 4.6745466321243525e-06, | |
| "loss": 0.0088, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.2768124625524266, | |
| "grad_norm": 1.1340457201004028, | |
| "learning_rate": 4.658354922279793e-06, | |
| "loss": 0.0121, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.282804074295986, | |
| "grad_norm": 1.0477025508880615, | |
| "learning_rate": 4.642163212435234e-06, | |
| "loss": 0.0107, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.2887956860395446, | |
| "grad_norm": 1.2503265142440796, | |
| "learning_rate": 4.625971502590674e-06, | |
| "loss": 0.0108, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.2947872977831034, | |
| "grad_norm": 0.959955096244812, | |
| "learning_rate": 4.609779792746114e-06, | |
| "loss": 0.0115, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.3007789095266626, | |
| "grad_norm": 6.084997177124023, | |
| "learning_rate": 4.593588082901555e-06, | |
| "loss": 0.0175, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.306770521270222, | |
| "grad_norm": 1.329868197441101, | |
| "learning_rate": 4.577396373056995e-06, | |
| "loss": 0.0104, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.3127621330137806, | |
| "grad_norm": 0.42435407638549805, | |
| "learning_rate": 4.561204663212436e-06, | |
| "loss": 0.0075, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.31875374475734, | |
| "grad_norm": 0.8225539922714233, | |
| "learning_rate": 4.545012953367876e-06, | |
| "loss": 0.0084, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.3247453565008986, | |
| "grad_norm": 1.0772991180419922, | |
| "learning_rate": 4.528821243523316e-06, | |
| "loss": 0.0098, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.330736968244458, | |
| "grad_norm": 1.085127592086792, | |
| "learning_rate": 4.512629533678756e-06, | |
| "loss": 0.0092, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.3367285799880166, | |
| "grad_norm": 1.8185174465179443, | |
| "learning_rate": 4.496437823834197e-06, | |
| "loss": 0.0095, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.342720191731576, | |
| "grad_norm": 1.0053589344024658, | |
| "learning_rate": 4.4802461139896375e-06, | |
| "loss": 0.0144, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.3487118034751346, | |
| "grad_norm": 2.0738162994384766, | |
| "learning_rate": 4.464054404145078e-06, | |
| "loss": 0.0166, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.354703415218694, | |
| "grad_norm": 1.0764700174331665, | |
| "learning_rate": 4.447862694300519e-06, | |
| "loss": 0.0096, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.3606950269622526, | |
| "grad_norm": 2.6137382984161377, | |
| "learning_rate": 4.431670984455959e-06, | |
| "loss": 0.0117, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.366686638705812, | |
| "grad_norm": 1.5871500968933105, | |
| "learning_rate": 4.4154792746114e-06, | |
| "loss": 0.0085, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.372678250449371, | |
| "grad_norm": 1.3385639190673828, | |
| "learning_rate": 4.39928756476684e-06, | |
| "loss": 0.0102, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.37866986219293, | |
| "grad_norm": 1.6860779523849487, | |
| "learning_rate": 4.38309585492228e-06, | |
| "loss": 0.0136, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.384661473936489, | |
| "grad_norm": 0.7180661559104919, | |
| "learning_rate": 4.366904145077721e-06, | |
| "loss": 0.0082, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.390653085680048, | |
| "grad_norm": 2.4058666229248047, | |
| "learning_rate": 4.350712435233161e-06, | |
| "loss": 0.0119, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.396644697423607, | |
| "grad_norm": 1.4926502704620361, | |
| "learning_rate": 4.334520725388601e-06, | |
| "loss": 0.0162, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.396644697423607, | |
| "eval_loss": 0.028091730549931526, | |
| "eval_runtime": 4824.2853, | |
| "eval_samples_per_second": 2.767, | |
| "eval_steps_per_second": 0.346, | |
| "eval_wer": 2.031379968359076, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.402636309167166, | |
| "grad_norm": 1.6941267251968384, | |
| "learning_rate": 4.318329015544041e-06, | |
| "loss": 0.0078, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.408627920910725, | |
| "grad_norm": 1.3402225971221924, | |
| "learning_rate": 4.302137305699482e-06, | |
| "loss": 0.0101, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.414619532654284, | |
| "grad_norm": 0.5281608700752258, | |
| "learning_rate": 4.2859455958549225e-06, | |
| "loss": 0.0131, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.420611144397843, | |
| "grad_norm": 2.264026165008545, | |
| "learning_rate": 4.2697538860103634e-06, | |
| "loss": 0.0116, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.426602756141402, | |
| "grad_norm": 1.3064323663711548, | |
| "learning_rate": 4.2535621761658036e-06, | |
| "loss": 0.0106, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.432594367884961, | |
| "grad_norm": 2.6873083114624023, | |
| "learning_rate": 4.237370466321244e-06, | |
| "loss": 0.0096, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.4385859796285203, | |
| "grad_norm": 1.4738309383392334, | |
| "learning_rate": 4.221178756476685e-06, | |
| "loss": 0.0113, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.444577591372079, | |
| "grad_norm": 1.6955463886260986, | |
| "learning_rate": 4.204987046632125e-06, | |
| "loss": 0.0116, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.450569203115638, | |
| "grad_norm": 1.9549095630645752, | |
| "learning_rate": 4.188795336787565e-06, | |
| "loss": 0.0075, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.456560814859197, | |
| "grad_norm": 0.9251440763473511, | |
| "learning_rate": 4.172603626943005e-06, | |
| "loss": 0.0175, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.4625524266027563, | |
| "grad_norm": 1.8292852640151978, | |
| "learning_rate": 4.156411917098446e-06, | |
| "loss": 0.0096, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.468544038346315, | |
| "grad_norm": 1.5289655923843384, | |
| "learning_rate": 4.140220207253887e-06, | |
| "loss": 0.0099, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.4745356500898743, | |
| "grad_norm": 1.8257941007614136, | |
| "learning_rate": 4.124028497409327e-06, | |
| "loss": 0.009, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.480527261833433, | |
| "grad_norm": 0.9446517825126648, | |
| "learning_rate": 4.107836787564767e-06, | |
| "loss": 0.0086, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.4865188735769923, | |
| "grad_norm": 0.4146974980831146, | |
| "learning_rate": 4.091645077720207e-06, | |
| "loss": 0.0083, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.492510485320551, | |
| "grad_norm": 1.946299433708191, | |
| "learning_rate": 4.075453367875648e-06, | |
| "loss": 0.01, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.4985020970641103, | |
| "grad_norm": 1.8347378969192505, | |
| "learning_rate": 4.0592616580310885e-06, | |
| "loss": 0.0143, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.5044937088076695, | |
| "grad_norm": 1.665825366973877, | |
| "learning_rate": 4.043069948186529e-06, | |
| "loss": 0.0093, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.5104853205512283, | |
| "grad_norm": 1.4112359285354614, | |
| "learning_rate": 4.026878238341969e-06, | |
| "loss": 0.0102, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.516476932294787, | |
| "grad_norm": 1.356748342514038, | |
| "learning_rate": 4.01068652849741e-06, | |
| "loss": 0.0097, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.5224685440383463, | |
| "grad_norm": 0.9731910228729248, | |
| "learning_rate": 3.99449481865285e-06, | |
| "loss": 0.0073, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.5284601557819055, | |
| "grad_norm": 1.8419386148452759, | |
| "learning_rate": 3.978303108808291e-06, | |
| "loss": 0.0116, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.5344517675254643, | |
| "grad_norm": 1.8927839994430542, | |
| "learning_rate": 3.962111398963731e-06, | |
| "loss": 0.0106, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.5404433792690235, | |
| "grad_norm": 2.0282535552978516, | |
| "learning_rate": 3.945919689119171e-06, | |
| "loss": 0.0119, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.5464349910125823, | |
| "grad_norm": 1.7558772563934326, | |
| "learning_rate": 3.929727979274612e-06, | |
| "loss": 0.0139, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.5524266027561415, | |
| "grad_norm": 1.7342463731765747, | |
| "learning_rate": 3.913536269430052e-06, | |
| "loss": 0.0128, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.5584182144997003, | |
| "grad_norm": 1.9559416770935059, | |
| "learning_rate": 3.897344559585492e-06, | |
| "loss": 0.009, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.5644098262432595, | |
| "grad_norm": 1.2850106954574585, | |
| "learning_rate": 3.8811528497409325e-06, | |
| "loss": 0.0114, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.5704014379868183, | |
| "grad_norm": 1.7029110193252563, | |
| "learning_rate": 3.8649611398963735e-06, | |
| "loss": 0.0087, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.5763930497303775, | |
| "grad_norm": 0.5204328298568726, | |
| "learning_rate": 3.848769430051814e-06, | |
| "loss": 0.0103, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.5823846614739363, | |
| "grad_norm": 1.2914999723434448, | |
| "learning_rate": 3.832577720207254e-06, | |
| "loss": 0.0094, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.5883762732174955, | |
| "grad_norm": 1.0208795070648193, | |
| "learning_rate": 3.816386010362695e-06, | |
| "loss": 0.0111, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.5943678849610547, | |
| "grad_norm": 1.2187551259994507, | |
| "learning_rate": 3.8001943005181353e-06, | |
| "loss": 0.0097, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.6003594967046135, | |
| "grad_norm": 1.391621470451355, | |
| "learning_rate": 3.7840025906735754e-06, | |
| "loss": 0.0103, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.6063511084481723, | |
| "grad_norm": 0.5924749970436096, | |
| "learning_rate": 3.767810880829016e-06, | |
| "loss": 0.01, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.6123427201917315, | |
| "grad_norm": 1.4632152318954468, | |
| "learning_rate": 3.751619170984456e-06, | |
| "loss": 0.0088, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.6183343319352907, | |
| "grad_norm": 0.8257956504821777, | |
| "learning_rate": 3.7354274611398966e-06, | |
| "loss": 0.0109, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.6243259436788495, | |
| "grad_norm": 1.0845006704330444, | |
| "learning_rate": 3.719235751295337e-06, | |
| "loss": 0.0081, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.6303175554224087, | |
| "grad_norm": 0.8061496019363403, | |
| "learning_rate": 3.7030440414507773e-06, | |
| "loss": 0.0108, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.6363091671659675, | |
| "grad_norm": 1.314125418663025, | |
| "learning_rate": 3.686852331606218e-06, | |
| "loss": 0.0123, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.6423007789095267, | |
| "grad_norm": 0.8642023205757141, | |
| "learning_rate": 3.6706606217616584e-06, | |
| "loss": 0.0119, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.6482923906530855, | |
| "grad_norm": 0.5751823782920837, | |
| "learning_rate": 3.654468911917099e-06, | |
| "loss": 0.007, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.6542840023966447, | |
| "grad_norm": 2.3470287322998047, | |
| "learning_rate": 3.638277202072539e-06, | |
| "loss": 0.0128, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.660275614140204, | |
| "grad_norm": 1.6865290403366089, | |
| "learning_rate": 3.6220854922279797e-06, | |
| "loss": 0.0109, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.6662672258837627, | |
| "grad_norm": 0.8434157371520996, | |
| "learning_rate": 3.6058937823834202e-06, | |
| "loss": 0.0081, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.6722588376273215, | |
| "grad_norm": 0.8960549831390381, | |
| "learning_rate": 3.5897020725388604e-06, | |
| "loss": 0.0105, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.6782504493708807, | |
| "grad_norm": 1.3118284940719604, | |
| "learning_rate": 3.573510362694301e-06, | |
| "loss": 0.0119, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.68424206111444, | |
| "grad_norm": 0.5671843886375427, | |
| "learning_rate": 3.557318652849741e-06, | |
| "loss": 0.0094, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.6902336728579987, | |
| "grad_norm": 1.562156319618225, | |
| "learning_rate": 3.5411269430051816e-06, | |
| "loss": 0.012, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.696225284601558, | |
| "grad_norm": 0.6534112691879272, | |
| "learning_rate": 3.5249352331606217e-06, | |
| "loss": 0.0162, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.7022168963451167, | |
| "grad_norm": 1.0599844455718994, | |
| "learning_rate": 3.5087435233160627e-06, | |
| "loss": 0.0094, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.708208508088676, | |
| "grad_norm": 0.5087442398071289, | |
| "learning_rate": 3.4925518134715033e-06, | |
| "loss": 0.0121, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.7142001198322347, | |
| "grad_norm": 1.2679471969604492, | |
| "learning_rate": 3.4763601036269434e-06, | |
| "loss": 0.0101, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.720191731575794, | |
| "grad_norm": 2.700209379196167, | |
| "learning_rate": 3.460168393782384e-06, | |
| "loss": 0.0102, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.7261833433193527, | |
| "grad_norm": 1.5533713102340698, | |
| "learning_rate": 3.443976683937824e-06, | |
| "loss": 0.0107, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.732174955062912, | |
| "grad_norm": 0.9379343390464783, | |
| "learning_rate": 3.4277849740932646e-06, | |
| "loss": 0.0095, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.7381665668064707, | |
| "grad_norm": 2.140592575073242, | |
| "learning_rate": 3.4115932642487048e-06, | |
| "loss": 0.0098, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.74415817855003, | |
| "grad_norm": 1.169646143913269, | |
| "learning_rate": 3.3954015544041453e-06, | |
| "loss": 0.0085, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.750149790293589, | |
| "grad_norm": 0.7018334865570068, | |
| "learning_rate": 3.3792098445595854e-06, | |
| "loss": 0.0099, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.756141402037148, | |
| "grad_norm": 2.107804775238037, | |
| "learning_rate": 3.363018134715026e-06, | |
| "loss": 0.0103, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.7621330137807067, | |
| "grad_norm": 1.43622624874115, | |
| "learning_rate": 3.346826424870467e-06, | |
| "loss": 0.0087, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.768124625524266, | |
| "grad_norm": 1.502923846244812, | |
| "learning_rate": 3.330634715025907e-06, | |
| "loss": 0.011, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.774116237267825, | |
| "grad_norm": 1.7550749778747559, | |
| "learning_rate": 3.3144430051813477e-06, | |
| "loss": 0.0154, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.780107849011384, | |
| "grad_norm": 1.826146125793457, | |
| "learning_rate": 3.2982512953367878e-06, | |
| "loss": 0.0117, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.786099460754943, | |
| "grad_norm": 1.9726802110671997, | |
| "learning_rate": 3.2820595854922283e-06, | |
| "loss": 0.0108, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.792091072498502, | |
| "grad_norm": 1.804656744003296, | |
| "learning_rate": 3.2658678756476685e-06, | |
| "loss": 0.0094, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.798082684242061, | |
| "grad_norm": 0.8989260792732239, | |
| "learning_rate": 3.249676165803109e-06, | |
| "loss": 0.0118, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.80407429598562, | |
| "grad_norm": 0.6879564523696899, | |
| "learning_rate": 3.233484455958549e-06, | |
| "loss": 0.0095, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.810065907729179, | |
| "grad_norm": 3.1088714599609375, | |
| "learning_rate": 3.2172927461139897e-06, | |
| "loss": 0.0114, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.8160575194727384, | |
| "grad_norm": 1.4156396389007568, | |
| "learning_rate": 3.2011010362694307e-06, | |
| "loss": 0.0094, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.822049131216297, | |
| "grad_norm": 1.6527442932128906, | |
| "learning_rate": 3.184909326424871e-06, | |
| "loss": 0.0127, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.828040742959856, | |
| "grad_norm": 2.5836308002471924, | |
| "learning_rate": 3.1687176165803114e-06, | |
| "loss": 0.0093, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.834032354703415, | |
| "grad_norm": 0.9800975322723389, | |
| "learning_rate": 3.1525259067357515e-06, | |
| "loss": 0.0093, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.8400239664469744, | |
| "grad_norm": 1.5359934568405151, | |
| "learning_rate": 3.136334196891192e-06, | |
| "loss": 0.0096, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.846015578190533, | |
| "grad_norm": 0.9235589504241943, | |
| "learning_rate": 3.120142487046632e-06, | |
| "loss": 0.0087, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.8520071899340924, | |
| "grad_norm": 0.276102751493454, | |
| "learning_rate": 3.1039507772020727e-06, | |
| "loss": 0.0103, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.857998801677651, | |
| "grad_norm": 1.072479009628296, | |
| "learning_rate": 3.087759067357513e-06, | |
| "loss": 0.0109, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.8639904134212104, | |
| "grad_norm": 0.30748701095581055, | |
| "learning_rate": 3.0715673575129534e-06, | |
| "loss": 0.0105, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.869982025164769, | |
| "grad_norm": 1.4529056549072266, | |
| "learning_rate": 3.055375647668394e-06, | |
| "loss": 0.0081, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.8759736369083284, | |
| "grad_norm": 0.8904408812522888, | |
| "learning_rate": 3.0391839378238345e-06, | |
| "loss": 0.0063, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.8819652486518876, | |
| "grad_norm": 1.0053412914276123, | |
| "learning_rate": 3.022992227979275e-06, | |
| "loss": 0.0074, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.8879568603954464, | |
| "grad_norm": 1.1586447954177856, | |
| "learning_rate": 3.0068005181347152e-06, | |
| "loss": 0.0108, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.893948472139005, | |
| "grad_norm": 0.35142049193382263, | |
| "learning_rate": 2.9906088082901558e-06, | |
| "loss": 0.0121, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.8999400838825644, | |
| "grad_norm": 0.6938487887382507, | |
| "learning_rate": 2.9744170984455963e-06, | |
| "loss": 0.0065, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.9059316956261236, | |
| "grad_norm": 0.7269591093063354, | |
| "learning_rate": 2.9582253886010365e-06, | |
| "loss": 0.0087, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.9119233073696824, | |
| "grad_norm": 0.7914448976516724, | |
| "learning_rate": 2.942033678756477e-06, | |
| "loss": 0.0121, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.917914919113241, | |
| "grad_norm": 1.7493529319763184, | |
| "learning_rate": 2.925841968911917e-06, | |
| "loss": 0.0076, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.9239065308568004, | |
| "grad_norm": 1.1263381242752075, | |
| "learning_rate": 2.9096502590673577e-06, | |
| "loss": 0.0076, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.9298981426003596, | |
| "grad_norm": 0.5138715505599976, | |
| "learning_rate": 2.893458549222798e-06, | |
| "loss": 0.0075, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.9358897543439184, | |
| "grad_norm": 1.6166647672653198, | |
| "learning_rate": 2.877266839378239e-06, | |
| "loss": 0.0097, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.9418813660874776, | |
| "grad_norm": 1.124442219734192, | |
| "learning_rate": 2.8610751295336794e-06, | |
| "loss": 0.0081, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.9478729778310364, | |
| "grad_norm": 1.6605931520462036, | |
| "learning_rate": 2.8448834196891195e-06, | |
| "loss": 0.0119, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.9538645895745956, | |
| "grad_norm": 0.45472681522369385, | |
| "learning_rate": 2.82869170984456e-06, | |
| "loss": 0.015, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.9598562013181544, | |
| "grad_norm": 1.1341872215270996, | |
| "learning_rate": 2.8125e-06, | |
| "loss": 0.0107, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.9658478130617136, | |
| "grad_norm": 1.1641947031021118, | |
| "learning_rate": 2.7963082901554407e-06, | |
| "loss": 0.0121, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.971839424805273, | |
| "grad_norm": 1.928467035293579, | |
| "learning_rate": 2.780116580310881e-06, | |
| "loss": 0.009, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.9778310365488316, | |
| "grad_norm": 0.6145504117012024, | |
| "learning_rate": 2.7639248704663214e-06, | |
| "loss": 0.0099, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.9838226482923904, | |
| "grad_norm": 1.6045176982879639, | |
| "learning_rate": 2.7477331606217615e-06, | |
| "loss": 0.0139, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.9898142600359496, | |
| "grad_norm": 1.8572264909744263, | |
| "learning_rate": 2.7315414507772025e-06, | |
| "loss": 0.0092, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.995805871779509, | |
| "grad_norm": 0.7941797375679016, | |
| "learning_rate": 2.715349740932643e-06, | |
| "loss": 0.0089, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 3.0017974835230676, | |
| "grad_norm": 0.1768355667591095, | |
| "learning_rate": 2.699158031088083e-06, | |
| "loss": 0.0106, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 3.007789095266627, | |
| "grad_norm": 0.34053680300712585, | |
| "learning_rate": 2.6829663212435238e-06, | |
| "loss": 0.0039, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 3.0137807070101856, | |
| "grad_norm": 0.35552066564559937, | |
| "learning_rate": 2.666774611398964e-06, | |
| "loss": 0.0027, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 3.019772318753745, | |
| "grad_norm": 0.289671391248703, | |
| "learning_rate": 2.6505829015544044e-06, | |
| "loss": 0.003, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 3.0257639304973036, | |
| "grad_norm": 0.47817885875701904, | |
| "learning_rate": 2.6343911917098446e-06, | |
| "loss": 0.0037, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 3.031755542240863, | |
| "grad_norm": 0.28214651346206665, | |
| "learning_rate": 2.618199481865285e-06, | |
| "loss": 0.0027, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 3.0377471539844216, | |
| "grad_norm": 0.34519505500793457, | |
| "learning_rate": 2.6020077720207253e-06, | |
| "loss": 0.0048, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 3.043738765727981, | |
| "grad_norm": 0.39826449751853943, | |
| "learning_rate": 2.585816062176166e-06, | |
| "loss": 0.0041, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 3.0497303774715396, | |
| "grad_norm": 0.3142577111721039, | |
| "learning_rate": 2.569624352331607e-06, | |
| "loss": 0.0041, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 3.055721989215099, | |
| "grad_norm": 0.2117956429719925, | |
| "learning_rate": 2.553432642487047e-06, | |
| "loss": 0.0028, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 3.061713600958658, | |
| "grad_norm": 0.21606473624706268, | |
| "learning_rate": 2.5372409326424875e-06, | |
| "loss": 0.0018, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 3.067705212702217, | |
| "grad_norm": 0.09918711334466934, | |
| "learning_rate": 2.5210492227979276e-06, | |
| "loss": 0.0026, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 3.073696824445776, | |
| "grad_norm": 0.12298409640789032, | |
| "learning_rate": 2.504857512953368e-06, | |
| "loss": 0.0025, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 3.079688436189335, | |
| "grad_norm": 0.5131508111953735, | |
| "learning_rate": 2.4886658031088083e-06, | |
| "loss": 0.0026, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 3.085680047932894, | |
| "grad_norm": 0.43076765537261963, | |
| "learning_rate": 2.472474093264249e-06, | |
| "loss": 0.0044, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 3.091671659676453, | |
| "grad_norm": 0.2750474810600281, | |
| "learning_rate": 2.4562823834196894e-06, | |
| "loss": 0.0019, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 3.097663271420012, | |
| "grad_norm": 0.2277926802635193, | |
| "learning_rate": 2.44009067357513e-06, | |
| "loss": 0.0026, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 3.103654883163571, | |
| "grad_norm": 0.29656535387039185, | |
| "learning_rate": 2.42389896373057e-06, | |
| "loss": 0.0029, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 3.10964649490713, | |
| "grad_norm": 0.17487123608589172, | |
| "learning_rate": 2.4077072538860106e-06, | |
| "loss": 0.0023, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 3.115638106650689, | |
| "grad_norm": 0.3185974955558777, | |
| "learning_rate": 2.3915155440414508e-06, | |
| "loss": 0.0028, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 3.121629718394248, | |
| "grad_norm": 0.42135900259017944, | |
| "learning_rate": 2.3753238341968913e-06, | |
| "loss": 0.0027, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 3.1276213301378073, | |
| "grad_norm": 0.2360919862985611, | |
| "learning_rate": 2.359132124352332e-06, | |
| "loss": 0.0024, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 3.133612941881366, | |
| "grad_norm": 0.21785879135131836, | |
| "learning_rate": 2.342940414507772e-06, | |
| "loss": 0.0016, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 3.1396045536249253, | |
| "grad_norm": 0.16767416894435883, | |
| "learning_rate": 2.3267487046632126e-06, | |
| "loss": 0.0035, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 3.145596165368484, | |
| "grad_norm": 0.5397895574569702, | |
| "learning_rate": 2.310556994818653e-06, | |
| "loss": 0.0033, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 3.1515877771120433, | |
| "grad_norm": 0.30774328112602234, | |
| "learning_rate": 2.2943652849740937e-06, | |
| "loss": 0.0029, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 3.157579388855602, | |
| "grad_norm": 0.6457645297050476, | |
| "learning_rate": 2.278173575129534e-06, | |
| "loss": 0.0033, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 3.1635710005991613, | |
| "grad_norm": 0.19459593296051025, | |
| "learning_rate": 2.2619818652849744e-06, | |
| "loss": 0.0037, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 3.16956261234272, | |
| "grad_norm": 0.28499457240104675, | |
| "learning_rate": 2.2457901554404145e-06, | |
| "loss": 0.0036, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 3.1755542240862793, | |
| "grad_norm": 0.1872144639492035, | |
| "learning_rate": 2.229598445595855e-06, | |
| "loss": 0.0029, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 3.181545835829838, | |
| "grad_norm": 0.1760193556547165, | |
| "learning_rate": 2.2134067357512956e-06, | |
| "loss": 0.0032, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 3.1875374475733973, | |
| "grad_norm": 1.5843597650527954, | |
| "learning_rate": 2.197215025906736e-06, | |
| "loss": 0.0053, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 3.193529059316956, | |
| "grad_norm": 0.2119935303926468, | |
| "learning_rate": 2.1810233160621763e-06, | |
| "loss": 0.0027, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 3.1995206710605153, | |
| "grad_norm": 0.8417574763298035, | |
| "learning_rate": 2.164831606217617e-06, | |
| "loss": 0.0035, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 3.205512282804074, | |
| "grad_norm": 0.14911438524723053, | |
| "learning_rate": 2.148639896373057e-06, | |
| "loss": 0.0028, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 3.2115038945476333, | |
| "grad_norm": 0.2696787416934967, | |
| "learning_rate": 2.1324481865284975e-06, | |
| "loss": 0.0036, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 3.2174955062911925, | |
| "grad_norm": 1.2951834201812744, | |
| "learning_rate": 2.116256476683938e-06, | |
| "loss": 0.0031, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 3.2234871180347513, | |
| "grad_norm": 0.3151995539665222, | |
| "learning_rate": 2.100064766839378e-06, | |
| "loss": 0.0021, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 3.2294787297783105, | |
| "grad_norm": 0.18740902841091156, | |
| "learning_rate": 2.0838730569948188e-06, | |
| "loss": 0.0038, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 3.2354703415218693, | |
| "grad_norm": 0.22964604198932648, | |
| "learning_rate": 2.0676813471502593e-06, | |
| "loss": 0.0023, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 3.2414619532654285, | |
| "grad_norm": 0.18690788745880127, | |
| "learning_rate": 2.0514896373057e-06, | |
| "loss": 0.0023, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 3.2474535650089873, | |
| "grad_norm": 0.3963768780231476, | |
| "learning_rate": 2.03529792746114e-06, | |
| "loss": 0.0023, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 3.2534451767525465, | |
| "grad_norm": 0.18004104495048523, | |
| "learning_rate": 2.0191062176165806e-06, | |
| "loss": 0.002, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 3.2594367884961053, | |
| "grad_norm": 0.40362900495529175, | |
| "learning_rate": 2.0029145077720207e-06, | |
| "loss": 0.0028, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 3.2654284002396645, | |
| "grad_norm": 1.1446609497070312, | |
| "learning_rate": 1.9867227979274612e-06, | |
| "loss": 0.0045, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 3.2714200119832233, | |
| "grad_norm": 0.6077856421470642, | |
| "learning_rate": 1.970531088082902e-06, | |
| "loss": 0.0049, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 3.2774116237267825, | |
| "grad_norm": 0.4644009470939636, | |
| "learning_rate": 1.954339378238342e-06, | |
| "loss": 0.0025, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 3.2834032354703417, | |
| "grad_norm": 0.1707988828420639, | |
| "learning_rate": 1.9381476683937825e-06, | |
| "loss": 0.0039, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 3.2893948472139005, | |
| "grad_norm": 0.14093299210071564, | |
| "learning_rate": 1.921955958549223e-06, | |
| "loss": 0.0026, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 3.2953864589574597, | |
| "grad_norm": 0.14803093671798706, | |
| "learning_rate": 1.9057642487046634e-06, | |
| "loss": 0.003, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 3.3013780707010185, | |
| "grad_norm": 0.1931457668542862, | |
| "learning_rate": 1.889572538860104e-06, | |
| "loss": 0.0032, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 3.3073696824445777, | |
| "grad_norm": 0.13468466699123383, | |
| "learning_rate": 1.8733808290155443e-06, | |
| "loss": 0.0021, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 3.3133612941881365, | |
| "grad_norm": 0.1815997213125229, | |
| "learning_rate": 1.8571891191709846e-06, | |
| "loss": 0.0025, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 3.3193529059316957, | |
| "grad_norm": 0.334640771150589, | |
| "learning_rate": 1.840997409326425e-06, | |
| "loss": 0.0032, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 3.3253445176752545, | |
| "grad_norm": 0.1825876235961914, | |
| "learning_rate": 1.8248056994818655e-06, | |
| "loss": 0.0024, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 3.3313361294188137, | |
| "grad_norm": 0.4397142231464386, | |
| "learning_rate": 1.8086139896373059e-06, | |
| "loss": 0.003, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 3.3373277411623725, | |
| "grad_norm": 0.3211234509944916, | |
| "learning_rate": 1.7924222797927462e-06, | |
| "loss": 0.0016, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 3.3433193529059317, | |
| "grad_norm": 0.27260443568229675, | |
| "learning_rate": 1.7762305699481865e-06, | |
| "loss": 0.0025, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 3.349310964649491, | |
| "grad_norm": 0.5644561052322388, | |
| "learning_rate": 1.7600388601036269e-06, | |
| "loss": 0.0035, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 3.3553025763930497, | |
| "grad_norm": 0.2065039575099945, | |
| "learning_rate": 1.7438471502590676e-06, | |
| "loss": 0.0026, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 3.3612941881366085, | |
| "grad_norm": 0.23062218725681305, | |
| "learning_rate": 1.727655440414508e-06, | |
| "loss": 0.0023, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 3.3672857998801677, | |
| "grad_norm": 0.19360890984535217, | |
| "learning_rate": 1.7114637305699483e-06, | |
| "loss": 0.0019, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 3.373277411623727, | |
| "grad_norm": 0.14503487944602966, | |
| "learning_rate": 1.6952720207253887e-06, | |
| "loss": 0.0029, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 3.3792690233672857, | |
| "grad_norm": 0.6343557834625244, | |
| "learning_rate": 1.6790803108808292e-06, | |
| "loss": 0.0024, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 3.385260635110845, | |
| "grad_norm": 0.1899099200963974, | |
| "learning_rate": 1.6628886010362696e-06, | |
| "loss": 0.002, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 3.3912522468544037, | |
| "grad_norm": 0.11445264518260956, | |
| "learning_rate": 1.64669689119171e-06, | |
| "loss": 0.002, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 3.397243858597963, | |
| "grad_norm": 0.2253667116165161, | |
| "learning_rate": 1.6305051813471503e-06, | |
| "loss": 0.0053, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 3.4032354703415217, | |
| "grad_norm": 0.17985133826732635, | |
| "learning_rate": 1.6143134715025908e-06, | |
| "loss": 0.0032, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 3.409227082085081, | |
| "grad_norm": 1.3421351909637451, | |
| "learning_rate": 1.5981217616580314e-06, | |
| "loss": 0.0031, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 3.4152186938286397, | |
| "grad_norm": 0.2474287450313568, | |
| "learning_rate": 1.5819300518134717e-06, | |
| "loss": 0.0026, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 3.421210305572199, | |
| "grad_norm": 3.9031786918640137, | |
| "learning_rate": 1.565738341968912e-06, | |
| "loss": 0.0042, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 3.4272019173157577, | |
| "grad_norm": 0.2293621450662613, | |
| "learning_rate": 1.5495466321243524e-06, | |
| "loss": 0.0022, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 3.433193529059317, | |
| "grad_norm": 0.34794846177101135, | |
| "learning_rate": 1.5333549222797927e-06, | |
| "loss": 0.0036, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 3.439185140802876, | |
| "grad_norm": 0.5763067603111267, | |
| "learning_rate": 1.5171632124352335e-06, | |
| "loss": 0.0035, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 3.445176752546435, | |
| "grad_norm": 0.18455998599529266, | |
| "learning_rate": 1.5009715025906738e-06, | |
| "loss": 0.0022, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 3.451168364289994, | |
| "grad_norm": 0.21217899024486542, | |
| "learning_rate": 1.4847797927461142e-06, | |
| "loss": 0.0043, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 3.457159976033553, | |
| "grad_norm": 0.25549477338790894, | |
| "learning_rate": 1.4685880829015545e-06, | |
| "loss": 0.0025, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 3.463151587777112, | |
| "grad_norm": 0.39118197560310364, | |
| "learning_rate": 1.4523963730569949e-06, | |
| "loss": 0.0027, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 3.469143199520671, | |
| "grad_norm": 0.6235507726669312, | |
| "learning_rate": 1.4362046632124354e-06, | |
| "loss": 0.0026, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 3.47513481126423, | |
| "grad_norm": 0.16387909650802612, | |
| "learning_rate": 1.4200129533678758e-06, | |
| "loss": 0.0035, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 3.481126423007789, | |
| "grad_norm": 0.7773973941802979, | |
| "learning_rate": 1.4038212435233161e-06, | |
| "loss": 0.0034, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 3.487118034751348, | |
| "grad_norm": 0.2718408703804016, | |
| "learning_rate": 1.3876295336787565e-06, | |
| "loss": 0.0025, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 3.493109646494907, | |
| "grad_norm": 0.7201581597328186, | |
| "learning_rate": 1.3714378238341968e-06, | |
| "loss": 0.0027, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 3.499101258238466, | |
| "grad_norm": 0.25421056151390076, | |
| "learning_rate": 1.3552461139896376e-06, | |
| "loss": 0.0048, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 3.5050928699820254, | |
| "grad_norm": 0.6171629428863525, | |
| "learning_rate": 1.339054404145078e-06, | |
| "loss": 0.0041, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 3.511084481725584, | |
| "grad_norm": 0.13500405848026276, | |
| "learning_rate": 1.3228626943005182e-06, | |
| "loss": 0.0024, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 3.517076093469143, | |
| "grad_norm": 0.2934102714061737, | |
| "learning_rate": 1.3066709844559586e-06, | |
| "loss": 0.0028, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 3.523067705212702, | |
| "grad_norm": 0.2204686999320984, | |
| "learning_rate": 1.290479274611399e-06, | |
| "loss": 0.0023, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 3.5290593169562614, | |
| "grad_norm": 0.2993859052658081, | |
| "learning_rate": 1.2742875647668395e-06, | |
| "loss": 0.0033, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 3.53505092869982, | |
| "grad_norm": 0.21279947459697723, | |
| "learning_rate": 1.2580958549222798e-06, | |
| "loss": 0.002, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 3.5410425404433794, | |
| "grad_norm": 0.20848390460014343, | |
| "learning_rate": 1.2419041450777204e-06, | |
| "loss": 0.002, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 3.547034152186938, | |
| "grad_norm": 0.41110262274742126, | |
| "learning_rate": 1.2257124352331607e-06, | |
| "loss": 0.0035, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 3.5530257639304974, | |
| "grad_norm": 0.21092066168785095, | |
| "learning_rate": 1.209520725388601e-06, | |
| "loss": 0.0019, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 3.559017375674056, | |
| "grad_norm": 0.2473544329404831, | |
| "learning_rate": 1.1933290155440414e-06, | |
| "loss": 0.0036, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 3.5650089874176154, | |
| "grad_norm": 0.16353699564933777, | |
| "learning_rate": 1.177137305699482e-06, | |
| "loss": 0.0025, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 3.5710005991611746, | |
| "grad_norm": 0.12036058306694031, | |
| "learning_rate": 1.1609455958549223e-06, | |
| "loss": 0.0027, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 3.5769922109047334, | |
| "grad_norm": 0.1448412388563156, | |
| "learning_rate": 1.1447538860103629e-06, | |
| "loss": 0.0034, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 3.582983822648292, | |
| "grad_norm": 0.6672219634056091, | |
| "learning_rate": 1.1285621761658032e-06, | |
| "loss": 0.0023, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 3.5889754343918514, | |
| "grad_norm": 0.20079953968524933, | |
| "learning_rate": 1.1123704663212438e-06, | |
| "loss": 0.0029, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 3.5949670461354106, | |
| "grad_norm": 0.49300870299339294, | |
| "learning_rate": 1.096178756476684e-06, | |
| "loss": 0.0019, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 3.5949670461354106, | |
| "eval_loss": 0.024242060258984566, | |
| "eval_runtime": 4867.1869, | |
| "eval_samples_per_second": 2.743, | |
| "eval_steps_per_second": 0.343, | |
| "eval_wer": 1.6195586580333698, | |
| "step": 6000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 6676, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 4, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.540147081183232e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
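
The structure above is a Hugging Face Trainer state file (`trainer_state.json`, the file the Trainer writes into each checkpoint directory). As a quick way to inspect it offline, here is a minimal sketch that loads the file and separates the training records (which carry `loss`) from the evaluation records (which carry `eval_loss` and `eval_wer`). The file path is an assumption; point it at your own copy. This sketch is an editorial addition, not part of the original state file.

```python
import json

# Assumed path: the Trainer saves trainer_state.json inside each
# checkpoint directory; adjust to wherever your copy lives.
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes training records (keyed by "loss") and evaluation
# records (keyed by "eval_wer"), so split them by key.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_wer" in e]

print(f"train records: {len(train_log)}, eval records: {len(eval_log)}")
last = train_log[-1]
print(f"final train loss at step {last['step']}: {last['loss']}")

# WER is an error rate, so lower is better.
best = min(eval_log, key=lambda e: e["eval_wer"])
print(f"best eval WER: {best['eval_wer']:.4f} at step {best['step']}")
```

On this run the split confirms what the log records: eval WER improves from roughly 2.031 at step 4000 to roughly 1.620 at step 6000, so of the evaluations logged here the step-6000 checkpoint is the best one.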