{
  "best_global_step": 2000,
  "best_metric": 3.9869195765987637,
  "best_model_checkpoint": "./SALAMA_NEW4/checkpoint-2000",
  "epoch": 1.1983223487118035,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005991611743559017,
      "grad_norm": 6.641392230987549,
      "learning_rate": 1.8e-07,
      "loss": 0.1132,
      "step": 10
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 6.161131858825684,
      "learning_rate": 3.8e-07,
      "loss": 0.149,
      "step": 20
    },
    {
      "epoch": 0.017974835230677052,
      "grad_norm": 5.680447578430176,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0882,
      "step": 30
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 3.6827754974365234,
      "learning_rate": 7.8e-07,
      "loss": 0.0995,
      "step": 40
    },
    {
      "epoch": 0.029958058717795086,
      "grad_norm": 5.75164794921875,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.1368,
      "step": 50
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 5.719022274017334,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0938,
      "step": 60
    },
    {
      "epoch": 0.041941282204913125,
      "grad_norm": 4.832603931427002,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.1163,
      "step": 70
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 5.619574546813965,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.1008,
      "step": 80
    },
    {
      "epoch": 0.05392450569203116,
      "grad_norm": 4.749862194061279,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.1126,
      "step": 90
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 6.061086177825928,
      "learning_rate": 1.98e-06,
      "loss": 0.1134,
      "step": 100
    },
    {
      "epoch": 0.0659077291791492,
      "grad_norm": 5.554771423339844,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.1178,
      "step": 110
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 4.674679756164551,
      "learning_rate": 2.38e-06,
      "loss": 0.0758,
      "step": 120
    },
    {
      "epoch": 0.07789095266626722,
      "grad_norm": 4.852492332458496,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0836,
      "step": 130
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 5.85217809677124,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.1097,
      "step": 140
    },
    {
      "epoch": 0.08987417615338526,
      "grad_norm": 6.0412421226501465,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0921,
      "step": 150
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 5.854034423828125,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.1144,
      "step": 160
    },
    {
      "epoch": 0.10185739964050329,
      "grad_norm": 4.274904251098633,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.1028,
      "step": 170
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 4.002969741821289,
      "learning_rate": 3.58e-06,
      "loss": 0.0759,
      "step": 180
    },
    {
      "epoch": 0.11384062312762133,
      "grad_norm": 4.920216083526611,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.1074,
      "step": 190
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 5.039630889892578,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.1081,
      "step": 200
    },
    {
      "epoch": 0.12582384661473936,
      "grad_norm": 5.63913631439209,
      "learning_rate": 4.18e-06,
      "loss": 0.0974,
      "step": 210
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 4.254915714263916,
      "learning_rate": 4.38e-06,
      "loss": 0.1017,
      "step": 220
    },
    {
      "epoch": 0.1378070701018574,
      "grad_norm": 5.292032241821289,
      "learning_rate": 4.58e-06,
      "loss": 0.0982,
      "step": 230
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 3.856678009033203,
      "learning_rate": 4.78e-06,
      "loss": 0.1015,
      "step": 240
    },
    {
      "epoch": 0.14979029358897544,
      "grad_norm": 4.549379825592041,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.1245,
      "step": 250
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 4.482922077178955,
      "learning_rate": 5.18e-06,
      "loss": 0.104,
      "step": 260
    },
    {
      "epoch": 0.16177351707609347,
      "grad_norm": 3.305623769760132,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0918,
      "step": 270
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 3.838818073272705,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.085,
      "step": 280
    },
    {
      "epoch": 0.1737567405632115,
      "grad_norm": 4.304590702056885,
      "learning_rate": 5.78e-06,
      "loss": 0.1025,
      "step": 290
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 5.107855796813965,
      "learning_rate": 5.98e-06,
      "loss": 0.1035,
      "step": 300
    },
    {
      "epoch": 0.18573996405032953,
      "grad_norm": 4.67608118057251,
      "learning_rate": 6.18e-06,
      "loss": 0.1292,
      "step": 310
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 3.5006017684936523,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.1232,
      "step": 320
    },
    {
      "epoch": 0.19772318753744758,
      "grad_norm": 4.693627834320068,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.117,
      "step": 330
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 3.6866164207458496,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.1016,
      "step": 340
    },
    {
      "epoch": 0.2097064110245656,
      "grad_norm": 4.933455944061279,
      "learning_rate": 6.98e-06,
      "loss": 0.108,
      "step": 350
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 2.64819073677063,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.1101,
      "step": 360
    },
    {
      "epoch": 0.22168963451168364,
      "grad_norm": 5.819120407104492,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.1305,
      "step": 370
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 4.504668235778809,
      "learning_rate": 7.58e-06,
      "loss": 0.0993,
      "step": 380
    },
    {
      "epoch": 0.23367285799880166,
      "grad_norm": 3.761913776397705,
      "learning_rate": 7.78e-06,
      "loss": 0.1033,
      "step": 390
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 3.8250932693481445,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0917,
      "step": 400
    },
    {
      "epoch": 0.24565608148591972,
      "grad_norm": 4.284841537475586,
      "learning_rate": 8.18e-06,
      "loss": 0.1012,
      "step": 410
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 4.682767391204834,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0861,
      "step": 420
    },
    {
      "epoch": 0.2576393049730378,
      "grad_norm": 3.7932474613189697,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.1151,
      "step": 430
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 3.588685989379883,
      "learning_rate": 8.78e-06,
      "loss": 0.1062,
      "step": 440
    },
    {
      "epoch": 0.2696225284601558,
      "grad_norm": 3.6409924030303955,
      "learning_rate": 8.98e-06,
      "loss": 0.0987,
      "step": 450
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 3.630150318145752,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0952,
      "step": 460
    },
    {
      "epoch": 0.28160575194727383,
      "grad_norm": 3.858980417251587,
      "learning_rate": 9.38e-06,
      "loss": 0.0992,
      "step": 470
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 4.108921527862549,
      "learning_rate": 9.58e-06,
      "loss": 0.1153,
      "step": 480
    },
    {
      "epoch": 0.29358897543439183,
      "grad_norm": 3.802626848220825,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.1149,
      "step": 490
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 4.295905113220215,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.1118,
      "step": 500
    },
    {
      "epoch": 0.3055721989215099,
      "grad_norm": 4.09583854675293,
      "learning_rate": 9.985427461139897e-06,
      "loss": 0.0955,
      "step": 510
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 3.3187222480773926,
      "learning_rate": 9.969235751295337e-06,
      "loss": 0.1075,
      "step": 520
    },
    {
      "epoch": 0.31755542240862794,
      "grad_norm": 8.15339183807373,
      "learning_rate": 9.953044041450778e-06,
      "loss": 0.1113,
      "step": 530
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 5.273709297180176,
      "learning_rate": 9.936852331606218e-06,
      "loss": 0.1015,
      "step": 540
    },
    {
      "epoch": 0.32953864589574594,
      "grad_norm": 3.836277484893799,
      "learning_rate": 9.920660621761659e-06,
      "loss": 0.0927,
      "step": 550
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 5.473774433135986,
      "learning_rate": 9.9044689119171e-06,
      "loss": 0.1045,
      "step": 560
    },
    {
      "epoch": 0.341521869382864,
      "grad_norm": 5.008555889129639,
      "learning_rate": 9.888277202072539e-06,
      "loss": 0.1193,
      "step": 570
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 8.994523048400879,
      "learning_rate": 9.87208549222798e-06,
      "loss": 0.1154,
      "step": 580
    },
    {
      "epoch": 0.35350509286998205,
      "grad_norm": 4.773490905761719,
      "learning_rate": 9.85589378238342e-06,
      "loss": 0.1222,
      "step": 590
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 3.776129722595215,
      "learning_rate": 9.839702072538862e-06,
      "loss": 0.0999,
      "step": 600
    },
    {
      "epoch": 0.36548831635710005,
      "grad_norm": 3.550184726715088,
      "learning_rate": 9.823510362694301e-06,
      "loss": 0.1012,
      "step": 610
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 4.164742469787598,
      "learning_rate": 9.807318652849742e-06,
      "loss": 0.0978,
      "step": 620
    },
    {
      "epoch": 0.3774715398442181,
      "grad_norm": 3.2359657287597656,
      "learning_rate": 9.791126943005183e-06,
      "loss": 0.0781,
      "step": 630
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 3.4513018131256104,
      "learning_rate": 9.774935233160622e-06,
      "loss": 0.1098,
      "step": 640
    },
    {
      "epoch": 0.3894547633313361,
      "grad_norm": 3.489149808883667,
      "learning_rate": 9.758743523316063e-06,
      "loss": 0.1316,
      "step": 650
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 4.0016374588012695,
      "learning_rate": 9.742551813471504e-06,
      "loss": 0.092,
      "step": 660
    },
    {
      "epoch": 0.40143798681845416,
      "grad_norm": 7.272388935089111,
      "learning_rate": 9.726360103626944e-06,
      "loss": 0.1273,
      "step": 670
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 3.443683385848999,
      "learning_rate": 9.710168393782385e-06,
      "loss": 0.1053,
      "step": 680
    },
    {
      "epoch": 0.4134212103055722,
      "grad_norm": 4.35030460357666,
      "learning_rate": 9.693976683937824e-06,
      "loss": 0.114,
      "step": 690
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 3.6015312671661377,
      "learning_rate": 9.677784974093265e-06,
      "loss": 0.1018,
      "step": 700
    },
    {
      "epoch": 0.4254044337926902,
      "grad_norm": 4.1978840827941895,
      "learning_rate": 9.661593264248706e-06,
      "loss": 0.1151,
      "step": 710
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 4.4482879638671875,
      "learning_rate": 9.645401554404145e-06,
      "loss": 0.1114,
      "step": 720
    },
    {
      "epoch": 0.4373876572798083,
      "grad_norm": 4.560624599456787,
      "learning_rate": 9.629209844559586e-06,
      "loss": 0.1207,
      "step": 730
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 5.984576225280762,
      "learning_rate": 9.613018134715027e-06,
      "loss": 0.1262,
      "step": 740
    },
    {
      "epoch": 0.44937088076692633,
      "grad_norm": 4.926331043243408,
      "learning_rate": 9.596826424870466e-06,
      "loss": 0.1041,
      "step": 750
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 4.679685115814209,
      "learning_rate": 9.580634715025907e-06,
      "loss": 0.1196,
      "step": 760
    },
    {
      "epoch": 0.46135410425404433,
      "grad_norm": 4.890758991241455,
      "learning_rate": 9.564443005181347e-06,
      "loss": 0.1145,
      "step": 770
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 4.479861259460449,
      "learning_rate": 9.548251295336788e-06,
      "loss": 0.1223,
      "step": 780
    },
    {
      "epoch": 0.4733373277411624,
      "grad_norm": 4.720247745513916,
      "learning_rate": 9.532059585492229e-06,
      "loss": 0.1056,
      "step": 790
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 3.5392069816589355,
      "learning_rate": 9.51586787564767e-06,
      "loss": 0.0992,
      "step": 800
    },
    {
      "epoch": 0.4853205512282804,
      "grad_norm": 3.7479641437530518,
      "learning_rate": 9.49967616580311e-06,
      "loss": 0.1007,
      "step": 810
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 4.393255710601807,
      "learning_rate": 9.48348445595855e-06,
      "loss": 0.1394,
      "step": 820
    },
    {
      "epoch": 0.49730377471539844,
      "grad_norm": 3.2709712982177734,
      "learning_rate": 9.467292746113991e-06,
      "loss": 0.0992,
      "step": 830
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 4.891329288482666,
      "learning_rate": 9.451101036269432e-06,
      "loss": 0.1123,
      "step": 840
    },
    {
      "epoch": 0.5092869982025164,
      "grad_norm": 4.337294101715088,
      "learning_rate": 9.434909326424871e-06,
      "loss": 0.0924,
      "step": 850
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 3.9342563152313232,
      "learning_rate": 9.418717616580312e-06,
      "loss": 0.1099,
      "step": 860
    },
    {
      "epoch": 0.5212702216896345,
      "grad_norm": 3.220473527908325,
      "learning_rate": 9.402525906735751e-06,
      "loss": 0.0935,
      "step": 870
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 4.463641166687012,
      "learning_rate": 9.386334196891192e-06,
      "loss": 0.1101,
      "step": 880
    },
    {
      "epoch": 0.5332534451767525,
      "grad_norm": 4.686647415161133,
      "learning_rate": 9.370142487046633e-06,
      "loss": 0.1161,
      "step": 890
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 3.4189178943634033,
      "learning_rate": 9.353950777202073e-06,
      "loss": 0.1144,
      "step": 900
    },
    {
      "epoch": 0.5452366686638705,
      "grad_norm": 3.407076597213745,
      "learning_rate": 9.337759067357514e-06,
      "loss": 0.0926,
      "step": 910
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 5.300261974334717,
      "learning_rate": 9.321567357512955e-06,
      "loss": 0.1047,
      "step": 920
    },
    {
      "epoch": 0.5572198921509887,
      "grad_norm": 2.8152244091033936,
      "learning_rate": 9.305375647668394e-06,
      "loss": 0.1082,
      "step": 930
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 4.786292552947998,
      "learning_rate": 9.289183937823835e-06,
      "loss": 0.1206,
      "step": 940
    },
    {
      "epoch": 0.5692031156381067,
      "grad_norm": 3.642893075942993,
      "learning_rate": 9.272992227979276e-06,
      "loss": 0.0863,
      "step": 950
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 4.487881660461426,
      "learning_rate": 9.256800518134715e-06,
      "loss": 0.1171,
      "step": 960
    },
    {
      "epoch": 0.5811863391252247,
      "grad_norm": 4.288365364074707,
      "learning_rate": 9.240608808290156e-06,
      "loss": 0.1095,
      "step": 970
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 3.9558470249176025,
      "learning_rate": 9.224417098445595e-06,
      "loss": 0.1185,
      "step": 980
    },
    {
      "epoch": 0.5931695626123428,
      "grad_norm": 2.573941707611084,
      "learning_rate": 9.208225388601038e-06,
      "loss": 0.1157,
      "step": 990
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 3.7058804035186768,
      "learning_rate": 9.192033678756477e-06,
      "loss": 0.1173,
      "step": 1000
    },
    {
      "epoch": 0.6051527860994608,
      "grad_norm": 4.578528881072998,
      "learning_rate": 9.175841968911918e-06,
      "loss": 0.1049,
      "step": 1010
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 4.096612930297852,
      "learning_rate": 9.15965025906736e-06,
      "loss": 0.102,
      "step": 1020
    },
    {
      "epoch": 0.6171360095865788,
      "grad_norm": 2.6690211296081543,
      "learning_rate": 9.143458549222799e-06,
      "loss": 0.1219,
      "step": 1030
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 4.082022190093994,
      "learning_rate": 9.12726683937824e-06,
      "loss": 0.0958,
      "step": 1040
    },
    {
      "epoch": 0.6291192330736968,
      "grad_norm": 5.364941596984863,
      "learning_rate": 9.11107512953368e-06,
      "loss": 0.1152,
      "step": 1050
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 3.664750099182129,
      "learning_rate": 9.09488341968912e-06,
      "loss": 0.107,
      "step": 1060
    },
    {
      "epoch": 0.6411024565608149,
      "grad_norm": 4.337752342224121,
      "learning_rate": 9.07869170984456e-06,
      "loss": 0.1053,
      "step": 1070
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 4.239952564239502,
      "learning_rate": 9.0625e-06,
      "loss": 0.0965,
      "step": 1080
    },
    {
      "epoch": 0.6530856800479329,
      "grad_norm": 3.8299310207366943,
      "learning_rate": 9.046308290155441e-06,
      "loss": 0.0995,
      "step": 1090
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 3.6321651935577393,
      "learning_rate": 9.030116580310882e-06,
      "loss": 0.0991,
      "step": 1100
    },
    {
      "epoch": 0.6650689035350509,
      "grad_norm": 3.9516804218292236,
      "learning_rate": 9.013924870466321e-06,
      "loss": 0.1068,
      "step": 1110
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 4.31940221786499,
      "learning_rate": 8.997733160621762e-06,
      "loss": 0.1065,
      "step": 1120
    },
    {
      "epoch": 0.677052127022169,
      "grad_norm": 4.485226154327393,
      "learning_rate": 8.981541450777203e-06,
      "loss": 0.1318,
      "step": 1130
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 2.977614402770996,
      "learning_rate": 8.965349740932643e-06,
      "loss": 0.1111,
      "step": 1140
    },
    {
      "epoch": 0.689035350509287,
      "grad_norm": 4.644229888916016,
      "learning_rate": 8.949158031088084e-06,
      "loss": 0.1119,
      "step": 1150
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 5.099035263061523,
      "learning_rate": 8.932966321243523e-06,
      "loss": 0.1033,
      "step": 1160
    },
    {
      "epoch": 0.701018573996405,
      "grad_norm": 3.9480855464935303,
      "learning_rate": 8.916774611398964e-06,
      "loss": 0.0895,
      "step": 1170
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 4.888814449310303,
      "learning_rate": 8.900582901554405e-06,
      "loss": 0.107,
      "step": 1180
    },
    {
      "epoch": 0.7130017974835231,
      "grad_norm": 2.8887240886688232,
      "learning_rate": 8.884391191709846e-06,
      "loss": 0.0974,
      "step": 1190
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 4.195169448852539,
      "learning_rate": 8.868199481865287e-06,
      "loss": 0.1186,
      "step": 1200
    },
    {
      "epoch": 0.7249850209706411,
      "grad_norm": 4.0084710121154785,
      "learning_rate": 8.852007772020726e-06,
      "loss": 0.1081,
      "step": 1210
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 3.978888511657715,
      "learning_rate": 8.835816062176167e-06,
      "loss": 0.1271,
      "step": 1220
    },
    {
      "epoch": 0.7369682444577591,
      "grad_norm": 2.9603512287139893,
      "learning_rate": 8.819624352331608e-06,
      "loss": 0.1017,
      "step": 1230
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 3.808750629425049,
      "learning_rate": 8.803432642487047e-06,
      "loss": 0.1119,
      "step": 1240
    },
    {
      "epoch": 0.7489514679448772,
      "grad_norm": 4.731956481933594,
      "learning_rate": 8.787240932642488e-06,
      "loss": 0.1015,
      "step": 1250
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 4.4244866371154785,
      "learning_rate": 8.771049222797927e-06,
      "loss": 0.1009,
      "step": 1260
    },
    {
      "epoch": 0.7609346914319952,
      "grad_norm": 5.3592305183410645,
      "learning_rate": 8.754857512953368e-06,
      "loss": 0.1175,
      "step": 1270
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 3.1789183616638184,
      "learning_rate": 8.73866580310881e-06,
      "loss": 0.1079,
      "step": 1280
    },
    {
      "epoch": 0.7729179149191132,
      "grad_norm": 4.719349384307861,
      "learning_rate": 8.722474093264249e-06,
      "loss": 0.1034,
      "step": 1290
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 3.4696717262268066,
      "learning_rate": 8.70628238341969e-06,
      "loss": 0.1042,
      "step": 1300
    },
    {
      "epoch": 0.7849011384062313,
      "grad_norm": 3.1757519245147705,
      "learning_rate": 8.69009067357513e-06,
      "loss": 0.0919,
      "step": 1310
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 5.520872592926025,
      "learning_rate": 8.67389896373057e-06,
      "loss": 0.1011,
      "step": 1320
    },
    {
      "epoch": 0.7968843618933493,
      "grad_norm": 4.574864387512207,
      "learning_rate": 8.657707253886011e-06,
      "loss": 0.1123,
      "step": 1330
    },
    {
      "epoch": 0.8028759736369083,
      "grad_norm": 3.809239625930786,
      "learning_rate": 8.641515544041452e-06,
      "loss": 0.1056,
      "step": 1340
    },
    {
      "epoch": 0.8088675853804673,
      "grad_norm": 4.405616283416748,
      "learning_rate": 8.625323834196891e-06,
      "loss": 0.1141,
      "step": 1350
    },
    {
      "epoch": 0.8148591971240263,
      "grad_norm": 4.816131114959717,
      "learning_rate": 8.609132124352332e-06,
      "loss": 0.1042,
      "step": 1360
    },
    {
      "epoch": 0.8208508088675854,
      "grad_norm": 4.3509602546691895,
      "learning_rate": 8.592940414507773e-06,
      "loss": 0.098,
      "step": 1370
    },
    {
      "epoch": 0.8268424206111444,
      "grad_norm": 3.7490243911743164,
      "learning_rate": 8.576748704663214e-06,
      "loss": 0.1049,
      "step": 1380
    },
    {
      "epoch": 0.8328340323547034,
      "grad_norm": 5.330031394958496,
      "learning_rate": 8.560556994818653e-06,
      "loss": 0.105,
      "step": 1390
    },
    {
      "epoch": 0.8388256440982624,
      "grad_norm": 4.027228355407715,
      "learning_rate": 8.544365284974094e-06,
      "loss": 0.1249,
      "step": 1400
    },
    {
      "epoch": 0.8448172558418214,
      "grad_norm": 4.0217790603637695,
      "learning_rate": 8.528173575129535e-06,
      "loss": 0.1259,
      "step": 1410
    },
    {
      "epoch": 0.8508088675853804,
      "grad_norm": 4.059916973114014,
      "learning_rate": 8.511981865284975e-06,
      "loss": 0.1173,
      "step": 1420
    },
    {
      "epoch": 0.8568004793289394,
      "grad_norm": 4.621293544769287,
      "learning_rate": 8.495790155440416e-06,
      "loss": 0.1037,
      "step": 1430
    },
    {
      "epoch": 0.8627920910724985,
      "grad_norm": 3.6666407585144043,
      "learning_rate": 8.479598445595855e-06,
      "loss": 0.1177,
      "step": 1440
    },
    {
      "epoch": 0.8687837028160575,
      "grad_norm": 4.930758953094482,
      "learning_rate": 8.463406735751296e-06,
      "loss": 0.1055,
      "step": 1450
    },
    {
      "epoch": 0.8747753145596165,
      "grad_norm": 4.322690010070801,
      "learning_rate": 8.447215025906737e-06,
      "loss": 0.1052,
      "step": 1460
    },
    {
      "epoch": 0.8807669263031755,
      "grad_norm": 3.768627405166626,
      "learning_rate": 8.431023316062176e-06,
      "loss": 0.1099,
      "step": 1470
    },
    {
      "epoch": 0.8867585380467345,
      "grad_norm": 3.582303047180176,
      "learning_rate": 8.414831606217617e-06,
      "loss": 0.1061,
      "step": 1480
    },
    {
      "epoch": 0.8927501497902935,
      "grad_norm": 3.874319076538086,
      "learning_rate": 8.398639896373058e-06,
      "loss": 0.126,
      "step": 1490
    },
    {
      "epoch": 0.8987417615338527,
      "grad_norm": 4.040960311889648,
      "learning_rate": 8.382448186528497e-06,
      "loss": 0.089,
      "step": 1500
    },
    {
      "epoch": 0.9047333732774117,
      "grad_norm": 4.185450077056885,
      "learning_rate": 8.366256476683938e-06,
      "loss": 0.0955,
      "step": 1510
    },
    {
      "epoch": 0.9107249850209707,
      "grad_norm": 3.388444185256958,
      "learning_rate": 8.35006476683938e-06,
      "loss": 0.1123,
      "step": 1520
    },
    {
      "epoch": 0.9167165967645297,
      "grad_norm": 4.470240592956543,
      "learning_rate": 8.333873056994819e-06,
      "loss": 0.1213,
      "step": 1530
    },
    {
      "epoch": 0.9227082085080887,
      "grad_norm": 4.528209686279297,
      "learning_rate": 8.31768134715026e-06,
      "loss": 0.1152,
      "step": 1540
    },
    {
      "epoch": 0.9286998202516477,
      "grad_norm": 4.039801597595215,
      "learning_rate": 8.301489637305699e-06,
      "loss": 0.0907,
      "step": 1550
    },
    {
      "epoch": 0.9346914319952067,
      "grad_norm": 2.667508840560913,
      "learning_rate": 8.28529792746114e-06,
      "loss": 0.1103,
      "step": 1560
    },
    {
      "epoch": 0.9406830437387658,
      "grad_norm": 4.166356086730957,
      "learning_rate": 8.269106217616581e-06,
      "loss": 0.1034,
      "step": 1570
    },
    {
      "epoch": 0.9466746554823248,
      "grad_norm": 4.047358989715576,
      "learning_rate": 8.252914507772022e-06,
      "loss": 0.1086,
      "step": 1580
    },
    {
      "epoch": 0.9526662672258838,
      "grad_norm": 3.4767513275146484,
      "learning_rate": 8.236722797927463e-06,
      "loss": 0.0982,
      "step": 1590
    },
    {
      "epoch": 0.9586578789694428,
      "grad_norm": 4.475752830505371,
      "learning_rate": 8.220531088082902e-06,
      "loss": 0.0879,
      "step": 1600
    },
    {
      "epoch": 0.9646494907130018,
      "grad_norm": 3.3900721073150635,
      "learning_rate": 8.204339378238343e-06,
      "loss": 0.0861,
      "step": 1610
    },
    {
      "epoch": 0.9706411024565608,
      "grad_norm": 3.077188014984131,
      "learning_rate": 8.188147668393784e-06,
      "loss": 0.1024,
      "step": 1620
    },
    {
      "epoch": 0.9766327142001199,
      "grad_norm": 4.166396141052246,
      "learning_rate": 8.171955958549223e-06,
      "loss": 0.0942,
      "step": 1630
    },
    {
      "epoch": 0.9826243259436789,
      "grad_norm": 2.931767463684082,
      "learning_rate": 8.155764248704664e-06,
      "loss": 0.0833,
      "step": 1640
    },
    {
      "epoch": 0.9886159376872379,
      "grad_norm": 3.9457390308380127,
      "learning_rate": 8.139572538860104e-06,
      "loss": 0.108,
      "step": 1650
    },
    {
      "epoch": 0.9946075494307969,
      "grad_norm": 4.256587505340576,
      "learning_rate": 8.123380829015545e-06,
      "loss": 0.0926,
      "step": 1660
    },
    {
      "epoch": 1.0005991611743559,
      "grad_norm": 1.9296483993530273,
      "learning_rate": 8.107189119170986e-06,
      "loss": 0.0814,
      "step": 1670
    },
    {
      "epoch": 1.0065907729179149,
      "grad_norm": 2.8497297763824463,
      "learning_rate": 8.090997409326425e-06,
      "loss": 0.0399,
      "step": 1680
    },
    {
      "epoch": 1.0125823846614739,
      "grad_norm": 1.6683580875396729,
      "learning_rate": 8.074805699481866e-06,
      "loss": 0.0465,
      "step": 1690
    },
    {
      "epoch": 1.0185739964050329,
      "grad_norm": 2.067464828491211,
      "learning_rate": 8.058613989637307e-06,
      "loss": 0.0388,
      "step": 1700
    },
    {
      "epoch": 1.0245656081485919,
      "grad_norm": 3.085991382598877,
      "learning_rate": 8.042422279792746e-06,
      "loss": 0.0389,
      "step": 1710
    },
    {
      "epoch": 1.030557219892151,
      "grad_norm": 2.972982883453369,
      "learning_rate": 8.026230569948187e-06,
      "loss": 0.0432,
      "step": 1720
    },
    {
      "epoch": 1.03654883163571,
      "grad_norm": 3.642639398574829,
      "learning_rate": 8.010038860103628e-06,
      "loss": 0.0474,
      "step": 1730
    },
    {
      "epoch": 1.042540443379269,
      "grad_norm": 3.922886848449707,
      "learning_rate": 7.993847150259067e-06,
      "loss": 0.0362,
      "step": 1740
    },
    {
      "epoch": 1.048532055122828,
      "grad_norm": 2.143171787261963,
      "learning_rate": 7.977655440414508e-06,
      "loss": 0.0453,
      "step": 1750
    },
    {
      "epoch": 1.054523666866387,
      "grad_norm": 2.2521893978118896,
      "learning_rate": 7.96146373056995e-06,
      "loss": 0.0347,
      "step": 1760
    },
    {
      "epoch": 1.060515278609946,
      "grad_norm": 2.5530147552490234,
      "learning_rate": 7.94527202072539e-06,
      "loss": 0.0321,
      "step": 1770
    },
    {
      "epoch": 1.066506890353505,
      "grad_norm": 1.3185819387435913,
      "learning_rate": 7.92908031088083e-06,
      "loss": 0.0352,
      "step": 1780
    },
    {
      "epoch": 1.072498502097064,
      "grad_norm": 4.018706321716309,
      "learning_rate": 7.91288860103627e-06,
      "loss": 0.0528,
      "step": 1790
    },
    {
      "epoch": 1.078490113840623,
      "grad_norm": 2.9235482215881348,
      "learning_rate": 7.896696891191711e-06,
      "loss": 0.0291,
      "step": 1800
    },
    {
      "epoch": 1.084481725584182,
      "grad_norm": 3.683203935623169,
      "learning_rate": 7.88050518134715e-06,
      "loss": 0.0398,
      "step": 1810
    },
    {
      "epoch": 1.090473337327741,
      "grad_norm": 7.694185256958008,
      "learning_rate": 7.864313471502592e-06,
      "loss": 0.0428,
      "step": 1820
    },
    {
      "epoch": 1.0964649490713,
      "grad_norm": 2.377105474472046,
      "learning_rate": 7.848121761658031e-06,
      "loss": 0.0474,
      "step": 1830
    },
    {
      "epoch": 1.102456560814859,
      "grad_norm": 2.613847494125366,
      "learning_rate": 7.831930051813472e-06,
      "loss": 0.0346,
      "step": 1840
    },
    {
      "epoch": 1.1084481725584183,
      "grad_norm": 2.805191993713379,
      "learning_rate": 7.815738341968913e-06,
      "loss": 0.043,
      "step": 1850
    },
    {
      "epoch": 1.1144397843019773,
      "grad_norm": 3.0600552558898926,
      "learning_rate": 7.799546632124352e-06,
      "loss": 0.0422,
      "step": 1860
    },
    {
      "epoch": 1.1204313960455363,
      "grad_norm": 1.6137948036193848,
      "learning_rate": 7.783354922279793e-06,
      "loss": 0.036,
      "step": 1870
    },
    {
      "epoch": 1.1264230077890953,
      "grad_norm": 1.492092251777649,
      "learning_rate": 7.767163212435234e-06,
      "loss": 0.0315,
      "step": 1880
    },
    {
      "epoch": 1.1324146195326543,
      "grad_norm": 2.410632610321045,
      "learning_rate": 7.750971502590674e-06,
      "loss": 0.0361,
      "step": 1890
    },
    {
      "epoch": 1.1384062312762133,
      "grad_norm": 2.9959256649017334,
      "learning_rate": 7.734779792746114e-06,
      "loss": 0.0248,
      "step": 1900
    },
    {
      "epoch": 1.1443978430197723,
      "grad_norm": 1.860141634941101,
      "learning_rate": 7.718588082901555e-06,
      "loss": 0.0337,
      "step": 1910
    },
    {
      "epoch": 1.1503894547633313,
      "grad_norm": 3.2396111488342285,
      "learning_rate": 7.702396373056995e-06,
      "loss": 0.0319,
      "step": 1920
    },
    {
      "epoch": 1.1563810665068903,
      "grad_norm": 3.930166721343994,
      "learning_rate": 7.686204663212436e-06,
      "loss": 0.0427,
      "step": 1930
    },
    {
      "epoch": 1.1623726782504493,
      "grad_norm": 2.2750892639160156,
      "learning_rate": 7.670012953367875e-06,
      "loss": 0.0417,
      "step": 1940
    },
    {
      "epoch": 1.1683642899940083,
      "grad_norm": 3.2963342666625977,
      "learning_rate": 7.653821243523318e-06,
      "loss": 0.0415,
      "step": 1950
    },
    {
      "epoch": 1.1743559017375673,
      "grad_norm": 2.719001531600952,
      "learning_rate": 7.637629533678757e-06,
      "loss": 0.0308,
      "step": 1960
    },
    {
      "epoch": 1.1803475134811263,
      "grad_norm": 3.191821813583374,
      "learning_rate": 7.621437823834198e-06,
      "loss": 0.0323,
      "step": 1970
    },
    {
      "epoch": 1.1863391252246855,
      "grad_norm": 2.4749386310577393,
      "learning_rate": 7.605246113989638e-06,
      "loss": 0.035,
      "step": 1980
    },
    {
      "epoch": 1.1923307369682445,
      "grad_norm": 2.652811050415039,
      "learning_rate": 7.589054404145079e-06,
      "loss": 0.0456,
      "step": 1990
    },
    {
      "epoch": 1.1983223487118035,
      "grad_norm": 2.6856987476348877,
      "learning_rate": 7.572862694300519e-06,
      "loss": 0.0313,
      "step": 2000
    },
    {
      "epoch": 1.1983223487118035,
      "eval_loss": 0.04815296828746796,
      "eval_runtime": 4874.046,
      "eval_samples_per_second": 2.739,
      "eval_steps_per_second": 0.342,
      "eval_wer": 3.9869195765987637,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 6676,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.846715693727744e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}