| { | |
| "best_global_step": 6000, | |
| "best_metric": 0.7426161258464242, | |
| "best_model_checkpoint": "./SALAMA_NEW7/checkpoint-6000", | |
| "epoch": 2.396245256640703, | |
| "eval_steps": 2000, | |
| "global_step": 6000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.003994407829039345, | |
| "grad_norm": 3.857668399810791, | |
| "learning_rate": 1.8e-07, | |
| "loss": 0.0481, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.00798881565807869, | |
| "grad_norm": 2.135991096496582, | |
| "learning_rate": 3.8e-07, | |
| "loss": 0.0317, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.011983223487118035, | |
| "grad_norm": 2.663736343383789, | |
| "learning_rate": 5.800000000000001e-07, | |
| "loss": 0.0313, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.01597763131615738, | |
| "grad_norm": 6.300492286682129, | |
| "learning_rate": 7.8e-07, | |
| "loss": 0.0442, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.019972039145196723, | |
| "grad_norm": 3.308899164199829, | |
| "learning_rate": 9.800000000000001e-07, | |
| "loss": 0.0378, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02396644697423607, | |
| "grad_norm": 3.85744309425354, | |
| "learning_rate": 1.1800000000000001e-06, | |
| "loss": 0.0387, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.027960854803275415, | |
| "grad_norm": 2.7074952125549316, | |
| "learning_rate": 1.3800000000000001e-06, | |
| "loss": 0.0411, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.03195526263231476, | |
| "grad_norm": 1.499977707862854, | |
| "learning_rate": 1.5800000000000001e-06, | |
| "loss": 0.0373, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.035949670461354104, | |
| "grad_norm": 3.291985273361206, | |
| "learning_rate": 1.7800000000000001e-06, | |
| "loss": 0.0291, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.039944078290393446, | |
| "grad_norm": 5.130978107452393, | |
| "learning_rate": 1.98e-06, | |
| "loss": 0.035, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.043938486119432796, | |
| "grad_norm": 3.7789242267608643, | |
| "learning_rate": 2.1800000000000003e-06, | |
| "loss": 0.0373, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.04793289394847214, | |
| "grad_norm": 1.6493456363677979, | |
| "learning_rate": 2.38e-06, | |
| "loss": 0.0375, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.05192730177751148, | |
| "grad_norm": 3.3285486698150635, | |
| "learning_rate": 2.5800000000000003e-06, | |
| "loss": 0.0358, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.05592170960655083, | |
| "grad_norm": 2.768843650817871, | |
| "learning_rate": 2.7800000000000005e-06, | |
| "loss": 0.0447, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.05991611743559017, | |
| "grad_norm": 4.878016471862793, | |
| "learning_rate": 2.9800000000000003e-06, | |
| "loss": 0.0388, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.06391052526462952, | |
| "grad_norm": 4.028059959411621, | |
| "learning_rate": 3.1800000000000005e-06, | |
| "loss": 0.0398, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06790493309366886, | |
| "grad_norm": 3.255234956741333, | |
| "learning_rate": 3.3800000000000007e-06, | |
| "loss": 0.0355, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.07189934092270821, | |
| "grad_norm": 3.8220138549804688, | |
| "learning_rate": 3.58e-06, | |
| "loss": 0.0403, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.07589374875174755, | |
| "grad_norm": 1.4640872478485107, | |
| "learning_rate": 3.7800000000000002e-06, | |
| "loss": 0.0385, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.07988815658078689, | |
| "grad_norm": 3.634939432144165, | |
| "learning_rate": 3.980000000000001e-06, | |
| "loss": 0.0468, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.08388256440982625, | |
| "grad_norm": 3.357348918914795, | |
| "learning_rate": 4.18e-06, | |
| "loss": 0.0337, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.08787697223886559, | |
| "grad_norm": 3.1094183921813965, | |
| "learning_rate": 4.38e-06, | |
| "loss": 0.0359, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.09187138006790493, | |
| "grad_norm": 5.401888847351074, | |
| "learning_rate": 4.58e-06, | |
| "loss": 0.0628, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.09586578789694428, | |
| "grad_norm": 3.3238024711608887, | |
| "learning_rate": 4.78e-06, | |
| "loss": 0.059, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.09986019572598362, | |
| "grad_norm": 2.741633892059326, | |
| "learning_rate": 4.980000000000001e-06, | |
| "loss": 0.0408, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.10385460355502296, | |
| "grad_norm": 2.8601815700531006, | |
| "learning_rate": 5.18e-06, | |
| "loss": 0.0328, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.10784901138406232, | |
| "grad_norm": 2.5481112003326416, | |
| "learning_rate": 5.380000000000001e-06, | |
| "loss": 0.034, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.11184341921310166, | |
| "grad_norm": 2.9293053150177, | |
| "learning_rate": 5.580000000000001e-06, | |
| "loss": 0.0345, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.115837827042141, | |
| "grad_norm": 3.529905319213867, | |
| "learning_rate": 5.78e-06, | |
| "loss": 0.0361, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.11983223487118035, | |
| "grad_norm": 3.364583969116211, | |
| "learning_rate": 5.98e-06, | |
| "loss": 0.0389, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.12382664270021969, | |
| "grad_norm": 4.122386455535889, | |
| "learning_rate": 6.18e-06, | |
| "loss": 0.0588, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.12782105052925904, | |
| "grad_norm": 4.145680904388428, | |
| "learning_rate": 6.380000000000001e-06, | |
| "loss": 0.0495, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.1318154583582984, | |
| "grad_norm": 4.538552761077881, | |
| "learning_rate": 6.5800000000000005e-06, | |
| "loss": 0.0418, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.13580986618733773, | |
| "grad_norm": 3.27231502532959, | |
| "learning_rate": 6.780000000000001e-06, | |
| "loss": 0.037, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.13980427401637707, | |
| "grad_norm": 2.733621597290039, | |
| "learning_rate": 6.98e-06, | |
| "loss": 0.0401, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.14379868184541642, | |
| "grad_norm": 3.7901926040649414, | |
| "learning_rate": 7.180000000000001e-06, | |
| "loss": 0.0414, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.14779308967445576, | |
| "grad_norm": 3.2932889461517334, | |
| "learning_rate": 7.3800000000000005e-06, | |
| "loss": 0.04, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.1517874975034951, | |
| "grad_norm": 2.3562254905700684, | |
| "learning_rate": 7.58e-06, | |
| "loss": 0.0424, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.15578190533253444, | |
| "grad_norm": 3.2661616802215576, | |
| "learning_rate": 7.78e-06, | |
| "loss": 0.035, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.15977631316157379, | |
| "grad_norm": 2.809990167617798, | |
| "learning_rate": 7.980000000000002e-06, | |
| "loss": 0.0404, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.16377072099061313, | |
| "grad_norm": 3.149474620819092, | |
| "learning_rate": 8.18e-06, | |
| "loss": 0.0488, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.1677651288196525, | |
| "grad_norm": 3.6923530101776123, | |
| "learning_rate": 8.380000000000001e-06, | |
| "loss": 0.0515, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.17175953664869184, | |
| "grad_norm": 2.9962356090545654, | |
| "learning_rate": 8.580000000000001e-06, | |
| "loss": 0.0416, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.17575394447773118, | |
| "grad_norm": 4.252007007598877, | |
| "learning_rate": 8.78e-06, | |
| "loss": 0.0448, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.17974835230677053, | |
| "grad_norm": 2.9716057777404785, | |
| "learning_rate": 8.98e-06, | |
| "loss": 0.0368, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.18374276013580987, | |
| "grad_norm": 3.339590072631836, | |
| "learning_rate": 9.180000000000002e-06, | |
| "loss": 0.041, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.1877371679648492, | |
| "grad_norm": 4.591104507446289, | |
| "learning_rate": 9.38e-06, | |
| "loss": 0.0469, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.19173157579388855, | |
| "grad_norm": 3.201215982437134, | |
| "learning_rate": 9.58e-06, | |
| "loss": 0.0403, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.1957259836229279, | |
| "grad_norm": 3.1253278255462646, | |
| "learning_rate": 9.780000000000001e-06, | |
| "loss": 0.0428, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.19972039145196724, | |
| "grad_norm": 2.59318208694458, | |
| "learning_rate": 9.980000000000001e-06, | |
| "loss": 0.0397, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.20371479928100658, | |
| "grad_norm": 3.2373461723327637, | |
| "learning_rate": 9.98716486023959e-06, | |
| "loss": 0.0515, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.20770920711004592, | |
| "grad_norm": 3.1935927867889404, | |
| "learning_rate": 9.972903593839133e-06, | |
| "loss": 0.0466, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.2117036149390853, | |
| "grad_norm": 3.210402011871338, | |
| "learning_rate": 9.958642327438678e-06, | |
| "loss": 0.0441, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.21569802276812464, | |
| "grad_norm": 2.792036533355713, | |
| "learning_rate": 9.944381061038221e-06, | |
| "loss": 0.0317, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.21969243059716398, | |
| "grad_norm": 1.8875095844268799, | |
| "learning_rate": 9.930119794637765e-06, | |
| "loss": 0.035, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.22368683842620332, | |
| "grad_norm": 4.238950252532959, | |
| "learning_rate": 9.91585852823731e-06, | |
| "loss": 0.0656, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.22768124625524266, | |
| "grad_norm": 3.023404359817505, | |
| "learning_rate": 9.901597261836851e-06, | |
| "loss": 0.0511, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.231675654084282, | |
| "grad_norm": 3.4290168285369873, | |
| "learning_rate": 9.887335995436396e-06, | |
| "loss": 0.0496, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.23567006191332135, | |
| "grad_norm": 4.026778697967529, | |
| "learning_rate": 9.87307472903594e-06, | |
| "loss": 0.0394, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.2396644697423607, | |
| "grad_norm": 3.315932512283325, | |
| "learning_rate": 9.858813462635483e-06, | |
| "loss": 0.044, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.24365887757140003, | |
| "grad_norm": 2.7956554889678955, | |
| "learning_rate": 9.844552196235026e-06, | |
| "loss": 0.0414, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.24765328540043938, | |
| "grad_norm": 4.850486755371094, | |
| "learning_rate": 9.83029092983457e-06, | |
| "loss": 0.0522, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.2516476932294787, | |
| "grad_norm": 3.1265628337860107, | |
| "learning_rate": 9.816029663434114e-06, | |
| "loss": 0.0358, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.2556421010585181, | |
| "grad_norm": 2.609273910522461, | |
| "learning_rate": 9.801768397033657e-06, | |
| "loss": 0.0397, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.2596365088875574, | |
| "grad_norm": 4.073995590209961, | |
| "learning_rate": 9.787507130633202e-06, | |
| "loss": 0.0491, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.2636309167165968, | |
| "grad_norm": 2.9083051681518555, | |
| "learning_rate": 9.773245864232744e-06, | |
| "loss": 0.034, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.2676253245456361, | |
| "grad_norm": 4.328786849975586, | |
| "learning_rate": 9.758984597832289e-06, | |
| "loss": 0.0498, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.27161973237467546, | |
| "grad_norm": 3.5342602729797363, | |
| "learning_rate": 9.744723331431832e-06, | |
| "loss": 0.0361, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.2756141402037148, | |
| "grad_norm": 1.7005605697631836, | |
| "learning_rate": 9.730462065031375e-06, | |
| "loss": 0.0512, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.27960854803275415, | |
| "grad_norm": 2.4722962379455566, | |
| "learning_rate": 9.71620079863092e-06, | |
| "loss": 0.0564, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.2836029558617935, | |
| "grad_norm": 4.595683574676514, | |
| "learning_rate": 9.701939532230463e-06, | |
| "loss": 0.0394, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.28759736369083283, | |
| "grad_norm": 4.185388088226318, | |
| "learning_rate": 9.687678265830007e-06, | |
| "loss": 0.0537, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.2915917715198722, | |
| "grad_norm": 2.412919044494629, | |
| "learning_rate": 9.67341699942955e-06, | |
| "loss": 0.0479, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.2955861793489115, | |
| "grad_norm": 3.0715839862823486, | |
| "learning_rate": 9.659155733029095e-06, | |
| "loss": 0.0524, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.2995805871779509, | |
| "grad_norm": 3.375519275665283, | |
| "learning_rate": 9.644894466628636e-06, | |
| "loss": 0.0555, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.3035749950069902, | |
| "grad_norm": 4.457313060760498, | |
| "learning_rate": 9.630633200228181e-06, | |
| "loss": 0.0502, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.30756940283602957, | |
| "grad_norm": 2.67417573928833, | |
| "learning_rate": 9.616371933827725e-06, | |
| "loss": 0.0367, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.3115638106650689, | |
| "grad_norm": 4.992038726806641, | |
| "learning_rate": 9.602110667427268e-06, | |
| "loss": 0.0459, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.31555821849410826, | |
| "grad_norm": 3.235741138458252, | |
| "learning_rate": 9.587849401026813e-06, | |
| "loss": 0.0363, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.31955262632314757, | |
| "grad_norm": 3.169708490371704, | |
| "learning_rate": 9.573588134626356e-06, | |
| "loss": 0.0441, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.32354703415218694, | |
| "grad_norm": 3.8259077072143555, | |
| "learning_rate": 9.5593268682259e-06, | |
| "loss": 0.0449, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.32754144198122626, | |
| "grad_norm": 2.690516948699951, | |
| "learning_rate": 9.545065601825442e-06, | |
| "loss": 0.0529, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.3315358498102656, | |
| "grad_norm": 3.342012643814087, | |
| "learning_rate": 9.530804335424987e-06, | |
| "loss": 0.0381, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.335530257639305, | |
| "grad_norm": 4.141724109649658, | |
| "learning_rate": 9.516543069024529e-06, | |
| "loss": 0.0505, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.3395246654683443, | |
| "grad_norm": 6.260738372802734, | |
| "learning_rate": 9.502281802624074e-06, | |
| "loss": 0.0466, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.3435190732973837, | |
| "grad_norm": 3.450930118560791, | |
| "learning_rate": 9.488020536223617e-06, | |
| "loss": 0.0499, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.347513481126423, | |
| "grad_norm": 3.835606813430786, | |
| "learning_rate": 9.47375926982316e-06, | |
| "loss": 0.0698, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.35150788895546237, | |
| "grad_norm": 5.289712429046631, | |
| "learning_rate": 9.459498003422705e-06, | |
| "loss": 0.0545, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.3555022967845017, | |
| "grad_norm": 3.3518259525299072, | |
| "learning_rate": 9.445236737022249e-06, | |
| "loss": 0.05, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.35949670461354105, | |
| "grad_norm": 2.9629595279693604, | |
| "learning_rate": 9.430975470621792e-06, | |
| "loss": 0.0458, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.36349111244258037, | |
| "grad_norm": 4.516363620758057, | |
| "learning_rate": 9.416714204221335e-06, | |
| "loss": 0.0505, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.36748552027161974, | |
| "grad_norm": 3.87882661819458, | |
| "learning_rate": 9.40245293782088e-06, | |
| "loss": 0.063, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.37147992810065905, | |
| "grad_norm": 5.499217987060547, | |
| "learning_rate": 9.388191671420423e-06, | |
| "loss": 0.0516, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.3754743359296984, | |
| "grad_norm": 2.694190740585327, | |
| "learning_rate": 9.373930405019966e-06, | |
| "loss": 0.0463, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.3794687437587378, | |
| "grad_norm": 3.5198814868927, | |
| "learning_rate": 9.35966913861951e-06, | |
| "loss": 0.0504, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.3834631515877771, | |
| "grad_norm": 3.1238009929656982, | |
| "learning_rate": 9.345407872219053e-06, | |
| "loss": 0.0371, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.3874575594168165, | |
| "grad_norm": 4.268932819366455, | |
| "learning_rate": 9.331146605818598e-06, | |
| "loss": 0.0528, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.3914519672458558, | |
| "grad_norm": 3.029754161834717, | |
| "learning_rate": 9.316885339418141e-06, | |
| "loss": 0.035, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.39544637507489516, | |
| "grad_norm": 4.07280158996582, | |
| "learning_rate": 9.302624073017684e-06, | |
| "loss": 0.0372, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.3994407829039345, | |
| "grad_norm": 2.339977741241455, | |
| "learning_rate": 9.288362806617228e-06, | |
| "loss": 0.0397, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.40343519073297385, | |
| "grad_norm": 4.685292720794678, | |
| "learning_rate": 9.274101540216773e-06, | |
| "loss": 0.0498, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.40742959856201316, | |
| "grad_norm": 3.811983346939087, | |
| "learning_rate": 9.259840273816316e-06, | |
| "loss": 0.042, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.41142400639105253, | |
| "grad_norm": 4.947136878967285, | |
| "learning_rate": 9.245579007415859e-06, | |
| "loss": 0.0509, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.41541841422009185, | |
| "grad_norm": 3.673635482788086, | |
| "learning_rate": 9.231317741015402e-06, | |
| "loss": 0.0465, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.4194128220491312, | |
| "grad_norm": 4.268950462341309, | |
| "learning_rate": 9.217056474614946e-06, | |
| "loss": 0.0461, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.4234072298781706, | |
| "grad_norm": 5.4644927978515625, | |
| "learning_rate": 9.20279520821449e-06, | |
| "loss": 0.0585, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.4274016377072099, | |
| "grad_norm": 2.472513437271118, | |
| "learning_rate": 9.188533941814034e-06, | |
| "loss": 0.0397, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.4313960455362493, | |
| "grad_norm": 5.088723659515381, | |
| "learning_rate": 9.174272675413579e-06, | |
| "loss": 0.0578, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.4353904533652886, | |
| "grad_norm": 5.186155796051025, | |
| "learning_rate": 9.16001140901312e-06, | |
| "loss": 0.0479, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.43938486119432796, | |
| "grad_norm": 2.81628155708313, | |
| "learning_rate": 9.145750142612665e-06, | |
| "loss": 0.0458, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.4433792690233673, | |
| "grad_norm": 2.2827396392822266, | |
| "learning_rate": 9.131488876212208e-06, | |
| "loss": 0.0395, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.44737367685240664, | |
| "grad_norm": 4.735171318054199, | |
| "learning_rate": 9.117227609811752e-06, | |
| "loss": 0.0512, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.45136808468144596, | |
| "grad_norm": 2.573768138885498, | |
| "learning_rate": 9.102966343411297e-06, | |
| "loss": 0.04, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.45536249251048533, | |
| "grad_norm": 3.1118862628936768, | |
| "learning_rate": 9.08870507701084e-06, | |
| "loss": 0.0662, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.45935690033952464, | |
| "grad_norm": 4.575135231018066, | |
| "learning_rate": 9.074443810610383e-06, | |
| "loss": 0.0412, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.463351308168564, | |
| "grad_norm": 3.333889961242676, | |
| "learning_rate": 9.060182544209926e-06, | |
| "loss": 0.0422, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.46734571599760333, | |
| "grad_norm": 4.380234241485596, | |
| "learning_rate": 9.045921277809471e-06, | |
| "loss": 0.0678, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.4713401238266427, | |
| "grad_norm": 3.1090171337127686, | |
| "learning_rate": 9.031660011409013e-06, | |
| "loss": 0.0419, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.47533453165568207, | |
| "grad_norm": 3.1724114418029785, | |
| "learning_rate": 9.017398745008558e-06, | |
| "loss": 0.0471, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.4793289394847214, | |
| "grad_norm": 3.701120138168335, | |
| "learning_rate": 9.003137478608101e-06, | |
| "loss": 0.0431, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.48332334731376075, | |
| "grad_norm": 1.9329041242599487, | |
| "learning_rate": 8.988876212207644e-06, | |
| "loss": 0.032, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.48731775514280007, | |
| "grad_norm": 4.282286167144775, | |
| "learning_rate": 8.97461494580719e-06, | |
| "loss": 0.0469, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.49131216297183944, | |
| "grad_norm": 2.6785926818847656, | |
| "learning_rate": 8.960353679406733e-06, | |
| "loss": 0.0643, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.49530657080087875, | |
| "grad_norm": 5.211093425750732, | |
| "learning_rate": 8.946092413006276e-06, | |
| "loss": 0.0579, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.4993009786299181, | |
| "grad_norm": 4.612974643707275, | |
| "learning_rate": 8.931831146605819e-06, | |
| "loss": 0.0618, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.5032953864589574, | |
| "grad_norm": 4.7832441329956055, | |
| "learning_rate": 8.917569880205364e-06, | |
| "loss": 0.0567, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.5072897942879968, | |
| "grad_norm": 3.8425779342651367, | |
| "learning_rate": 8.903308613804906e-06, | |
| "loss": 0.0489, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.5112842021170362, | |
| "grad_norm": 2.750922679901123, | |
| "learning_rate": 8.88904734740445e-06, | |
| "loss": 0.0438, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.5152786099460755, | |
| "grad_norm": 3.5208797454833984, | |
| "learning_rate": 8.874786081003994e-06, | |
| "loss": 0.0472, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.5192730177751148, | |
| "grad_norm": 5.142436981201172, | |
| "learning_rate": 8.860524814603537e-06, | |
| "loss": 0.0622, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.5232674256041542, | |
| "grad_norm": 3.287731885910034, | |
| "learning_rate": 8.846263548203082e-06, | |
| "loss": 0.0476, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.5272618334331935, | |
| "grad_norm": 1.9734798669815063, | |
| "learning_rate": 8.832002281802625e-06, | |
| "loss": 0.0485, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.5312562412622329, | |
| "grad_norm": 4.245563507080078, | |
| "learning_rate": 8.817741015402168e-06, | |
| "loss": 0.0572, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.5352506490912722, | |
| "grad_norm": 3.6561875343322754, | |
| "learning_rate": 8.803479749001712e-06, | |
| "loss": 0.05, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.5392450569203115, | |
| "grad_norm": 3.4770493507385254, | |
| "learning_rate": 8.789218482601257e-06, | |
| "loss": 0.0455, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.5432394647493509, | |
| "grad_norm": 2.5556652545928955, | |
| "learning_rate": 8.7749572162008e-06, | |
| "loss": 0.0396, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.5472338725783903, | |
| "grad_norm": 3.5269246101379395, | |
| "learning_rate": 8.760695949800343e-06, | |
| "loss": 0.0435, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.5512282804074295, | |
| "grad_norm": 3.7632479667663574, | |
| "learning_rate": 8.746434683399886e-06, | |
| "loss": 0.0458, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.5552226882364689, | |
| "grad_norm": 4.281711578369141, | |
| "learning_rate": 8.73217341699943e-06, | |
| "loss": 0.0511, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.5592170960655083, | |
| "grad_norm": 3.2763116359710693, | |
| "learning_rate": 8.717912150598975e-06, | |
| "loss": 0.0405, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.5632115038945477, | |
| "grad_norm": 3.267866849899292, | |
| "learning_rate": 8.703650884198518e-06, | |
| "loss": 0.0393, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.567205911723587, | |
| "grad_norm": 3.6679697036743164, | |
| "learning_rate": 8.689389617798061e-06, | |
| "loss": 0.06, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.5712003195526263, | |
| "grad_norm": 2.4105310440063477, | |
| "learning_rate": 8.675128351397604e-06, | |
| "loss": 0.0406, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.5751947273816657, | |
| "grad_norm": 4.512986183166504, | |
| "learning_rate": 8.66086708499715e-06, | |
| "loss": 0.0522, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.579189135210705, | |
| "grad_norm": 2.781480550765991, | |
| "learning_rate": 8.646605818596692e-06, | |
| "loss": 0.0575, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.5831835430397444, | |
| "grad_norm": 2.455273151397705, | |
| "learning_rate": 8.632344552196236e-06, | |
| "loss": 0.0515, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.5871779508687837, | |
| "grad_norm": 3.5020835399627686, | |
| "learning_rate": 8.618083285795779e-06, | |
| "loss": 0.0493, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.591172358697823, | |
| "grad_norm": 2.549257278442383, | |
| "learning_rate": 8.603822019395322e-06, | |
| "loss": 0.0305, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.5951667665268624, | |
| "grad_norm": 3.308424949645996, | |
| "learning_rate": 8.589560752994867e-06, | |
| "loss": 0.0394, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.5991611743559018, | |
| "grad_norm": 4.140067100524902, | |
| "learning_rate": 8.57529948659441e-06, | |
| "loss": 0.0452, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.603155582184941, | |
| "grad_norm": 4.411773204803467, | |
| "learning_rate": 8.561038220193954e-06, | |
| "loss": 0.0564, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.6071499900139804, | |
| "grad_norm": 2.6754167079925537, | |
| "learning_rate": 8.546776953793497e-06, | |
| "loss": 0.0398, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.6111443978430198, | |
| "grad_norm": 2.152740001678467, | |
| "learning_rate": 8.532515687393042e-06, | |
| "loss": 0.0417, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.6151388056720591, | |
| "grad_norm": 2.487994909286499, | |
| "learning_rate": 8.518254420992585e-06, | |
| "loss": 0.0475, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.6191332135010985, | |
| "grad_norm": 1.9966262578964233, | |
| "learning_rate": 8.503993154592128e-06, | |
| "loss": 0.0456, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.6231276213301378, | |
| "grad_norm": 2.2700722217559814, | |
| "learning_rate": 8.489731888191672e-06, | |
| "loss": 0.0498, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.6271220291591771, | |
| "grad_norm": 4.674302101135254, | |
| "learning_rate": 8.475470621791215e-06, | |
| "loss": 0.0531, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.6311164369882165, | |
| "grad_norm": 3.2880172729492188, | |
| "learning_rate": 8.46120935539076e-06, | |
| "loss": 0.0418, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.6351108448172559, | |
| "grad_norm": 4.65671968460083, | |
| "learning_rate": 8.446948088990303e-06, | |
| "loss": 0.0429, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.6391052526462951, | |
| "grad_norm": 2.744880199432373, | |
| "learning_rate": 8.432686822589846e-06, | |
| "loss": 0.038, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.6430996604753345, | |
| "grad_norm": 3.2633752822875977, | |
| "learning_rate": 8.41842555618939e-06, | |
| "loss": 0.0508, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.6470940683043739, | |
| "grad_norm": 4.094485759735107, | |
| "learning_rate": 8.404164289788934e-06, | |
| "loss": 0.0482, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.6510884761334133, | |
| "grad_norm": 3.6843981742858887, | |
| "learning_rate": 8.389903023388478e-06, | |
| "loss": 0.0426, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.6550828839624525, | |
| "grad_norm": 2.887274742126465, | |
| "learning_rate": 8.375641756988021e-06, | |
| "loss": 0.051, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.6590772917914919, | |
| "grad_norm": 3.3279778957366943, | |
| "learning_rate": 8.361380490587566e-06, | |
| "loss": 0.0378, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.6630716996205313, | |
| "grad_norm": 3.5682291984558105, | |
| "learning_rate": 8.347119224187107e-06, | |
| "loss": 0.054, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.6670661074495706, | |
| "grad_norm": 4.352251052856445, | |
| "learning_rate": 8.332857957786652e-06, | |
| "loss": 0.0431, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.67106051527861, | |
| "grad_norm": 2.8711681365966797, | |
| "learning_rate": 8.318596691386196e-06, | |
| "loss": 0.0351, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.6750549231076493, | |
| "grad_norm": 3.0659475326538086, | |
| "learning_rate": 8.304335424985739e-06, | |
| "loss": 0.0413, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.6790493309366886, | |
| "grad_norm": 5.653151988983154, | |
| "learning_rate": 8.290074158585282e-06, | |
| "loss": 0.0526, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.683043738765728, | |
| "grad_norm": 7.713840007781982, | |
| "learning_rate": 8.275812892184827e-06, | |
| "loss": 0.0495, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.6870381465947674, | |
| "grad_norm": 2.798320770263672, | |
| "learning_rate": 8.26155162578437e-06, | |
| "loss": 0.0436, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.6910325544238066, | |
| "grad_norm": 3.696972131729126, | |
| "learning_rate": 8.247290359383914e-06, | |
| "loss": 0.0413, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.695026962252846, | |
| "grad_norm": 2.2935080528259277, | |
| "learning_rate": 8.233029092983458e-06, | |
| "loss": 0.0517, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.6990213700818854, | |
| "grad_norm": 4.169670581817627, | |
| "learning_rate": 8.218767826583002e-06, | |
| "loss": 0.0499, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.7030157779109247, | |
| "grad_norm": 4.221024036407471, | |
| "learning_rate": 8.204506560182545e-06, | |
| "loss": 0.041, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7070101857399641, | |
| "grad_norm": 4.128349781036377, | |
| "learning_rate": 8.190245293782088e-06, | |
| "loss": 0.0598, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7110045935690034, | |
| "grad_norm": 3.2227325439453125, | |
| "learning_rate": 8.175984027381633e-06, | |
| "loss": 0.0468, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7149990013980427, | |
| "grad_norm": 2.807412624359131, | |
| "learning_rate": 8.161722760981175e-06, | |
| "loss": 0.0419, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7189934092270821, | |
| "grad_norm": 3.685307741165161, | |
| "learning_rate": 8.14746149458072e-06, | |
| "loss": 0.05, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7229878170561215, | |
| "grad_norm": 3.1187121868133545, | |
| "learning_rate": 8.133200228180263e-06, | |
| "loss": 0.0483, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7269822248851607, | |
| "grad_norm": 3.4114596843719482, | |
| "learning_rate": 8.118938961779806e-06, | |
| "loss": 0.0494, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7309766327142001, | |
| "grad_norm": 1.188926100730896, | |
| "learning_rate": 8.104677695379351e-06, | |
| "loss": 0.0366, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7349710405432395, | |
| "grad_norm": 3.5981297492980957, | |
| "learning_rate": 8.090416428978894e-06, | |
| "loss": 0.0503, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7389654483722788, | |
| "grad_norm": 3.0543458461761475, | |
| "learning_rate": 8.076155162578438e-06, | |
| "loss": 0.0495, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7429598562013181, | |
| "grad_norm": 3.7244555950164795, | |
| "learning_rate": 8.06189389617798e-06, | |
| "loss": 0.0496, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7469542640303575, | |
| "grad_norm": 2.367532968521118, | |
| "learning_rate": 8.047632629777526e-06, | |
| "loss": 0.0418, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7509486718593968, | |
| "grad_norm": 2.6962149143218994, | |
| "learning_rate": 8.033371363377069e-06, | |
| "loss": 0.0379, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7549430796884362, | |
| "grad_norm": 3.7440872192382812, | |
| "learning_rate": 8.019110096976612e-06, | |
| "loss": 0.0411, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7589374875174756, | |
| "grad_norm": 3.3427021503448486, | |
| "learning_rate": 8.004848830576156e-06, | |
| "loss": 0.0367, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7629318953465148, | |
| "grad_norm": 3.3870105743408203, | |
| "learning_rate": 7.990587564175699e-06, | |
| "loss": 0.0575, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.7669263031755542, | |
| "grad_norm": 3.3747177124023438, | |
| "learning_rate": 7.976326297775244e-06, | |
| "loss": 0.0455, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7709207110045936, | |
| "grad_norm": 2.9369425773620605, | |
| "learning_rate": 7.962065031374787e-06, | |
| "loss": 0.0474, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.774915118833633, | |
| "grad_norm": 2.0219178199768066, | |
| "learning_rate": 7.94780376497433e-06, | |
| "loss": 0.0496, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.7789095266626722, | |
| "grad_norm": 3.041201591491699, | |
| "learning_rate": 7.933542498573873e-06, | |
| "loss": 0.0386, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.7829039344917116, | |
| "grad_norm": 3.874758243560791, | |
| "learning_rate": 7.919281232173418e-06, | |
| "loss": 0.0331, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.786898342320751, | |
| "grad_norm": 2.3752293586730957, | |
| "learning_rate": 7.905019965772962e-06, | |
| "loss": 0.0477, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.7908927501497903, | |
| "grad_norm": 4.09476900100708, | |
| "learning_rate": 7.890758699372505e-06, | |
| "loss": 0.051, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.7948871579788296, | |
| "grad_norm": 1.380469799041748, | |
| "learning_rate": 7.876497432972048e-06, | |
| "loss": 0.0313, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.798881565807869, | |
| "grad_norm": 2.8417437076568604, | |
| "learning_rate": 7.862236166571591e-06, | |
| "loss": 0.0444, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.798881565807869, | |
| "eval_loss": 0.02616371586918831, | |
| "eval_runtime": 9253.1401, | |
| "eval_samples_per_second": 2.164, | |
| "eval_steps_per_second": 0.271, | |
| "eval_wer": 2.3495261116052184, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8028759736369083, | |
| "grad_norm": 4.659438133239746, | |
| "learning_rate": 7.847974900171136e-06, | |
| "loss": 0.0459, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8068703814659477, | |
| "grad_norm": 2.24816632270813, | |
| "learning_rate": 7.83371363377068e-06, | |
| "loss": 0.0405, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8108647892949871, | |
| "grad_norm": 2.8032522201538086, | |
| "learning_rate": 7.819452367370223e-06, | |
| "loss": 0.0393, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.8148591971240263, | |
| "grad_norm": 3.3999099731445312, | |
| "learning_rate": 7.805191100969766e-06, | |
| "loss": 0.0541, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8188536049530657, | |
| "grad_norm": 2.2319793701171875, | |
| "learning_rate": 7.790929834569311e-06, | |
| "loss": 0.0474, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8228480127821051, | |
| "grad_norm": 2.5778238773345947, | |
| "learning_rate": 7.776668568168854e-06, | |
| "loss": 0.0409, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.8268424206111444, | |
| "grad_norm": 2.59147047996521, | |
| "learning_rate": 7.762407301768397e-06, | |
| "loss": 0.0561, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.8308368284401837, | |
| "grad_norm": 2.459582805633545, | |
| "learning_rate": 7.748146035367942e-06, | |
| "loss": 0.0453, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8348312362692231, | |
| "grad_norm": 4.553118705749512, | |
| "learning_rate": 7.733884768967484e-06, | |
| "loss": 0.0417, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8388256440982624, | |
| "grad_norm": 3.4171009063720703, | |
| "learning_rate": 7.719623502567029e-06, | |
| "loss": 0.048, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8428200519273018, | |
| "grad_norm": 3.3071610927581787, | |
| "learning_rate": 7.705362236166572e-06, | |
| "loss": 0.0415, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8468144597563412, | |
| "grad_norm": 2.8889715671539307, | |
| "learning_rate": 7.691100969766115e-06, | |
| "loss": 0.0384, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8508088675853804, | |
| "grad_norm": 2.495051145553589, | |
| "learning_rate": 7.676839703365659e-06, | |
| "loss": 0.0591, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8548032754144198, | |
| "grad_norm": 3.1327059268951416, | |
| "learning_rate": 7.662578436965204e-06, | |
| "loss": 0.0492, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8587976832434592, | |
| "grad_norm": 6.273247241973877, | |
| "learning_rate": 7.648317170564747e-06, | |
| "loss": 0.056, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.8627920910724985, | |
| "grad_norm": 4.723370552062988, | |
| "learning_rate": 7.63405590416429e-06, | |
| "loss": 0.039, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8667864989015378, | |
| "grad_norm": 2.1236581802368164, | |
| "learning_rate": 7.619794637763834e-06, | |
| "loss": 0.0404, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.8707809067305772, | |
| "grad_norm": 3.278244733810425, | |
| "learning_rate": 7.6055333713633774e-06, | |
| "loss": 0.0425, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.8747753145596165, | |
| "grad_norm": 3.6880099773406982, | |
| "learning_rate": 7.5912721049629215e-06, | |
| "loss": 0.0321, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.8787697223886559, | |
| "grad_norm": 2.7336859703063965, | |
| "learning_rate": 7.577010838562466e-06, | |
| "loss": 0.0373, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.8827641302176952, | |
| "grad_norm": 4.318443775177002, | |
| "learning_rate": 7.562749572162008e-06, | |
| "loss": 0.0446, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.8867585380467345, | |
| "grad_norm": 2.5419094562530518, | |
| "learning_rate": 7.548488305761552e-06, | |
| "loss": 0.0397, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.8907529458757739, | |
| "grad_norm": 3.2107887268066406, | |
| "learning_rate": 7.534227039361096e-06, | |
| "loss": 0.0477, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.8947473537048133, | |
| "grad_norm": 2.974850654602051, | |
| "learning_rate": 7.519965772960639e-06, | |
| "loss": 0.0492, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.8987417615338527, | |
| "grad_norm": 2.0521247386932373, | |
| "learning_rate": 7.505704506560183e-06, | |
| "loss": 0.0457, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9027361693628919, | |
| "grad_norm": 2.3275723457336426, | |
| "learning_rate": 7.491443240159727e-06, | |
| "loss": 0.0384, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9067305771919313, | |
| "grad_norm": 2.8880300521850586, | |
| "learning_rate": 7.47718197375927e-06, | |
| "loss": 0.0366, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9107249850209707, | |
| "grad_norm": 2.3944599628448486, | |
| "learning_rate": 7.462920707358814e-06, | |
| "loss": 0.0312, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.91471939285001, | |
| "grad_norm": 4.355307579040527, | |
| "learning_rate": 7.448659440958358e-06, | |
| "loss": 0.0571, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9187138006790493, | |
| "grad_norm": 4.85425329208374, | |
| "learning_rate": 7.434398174557901e-06, | |
| "loss": 0.0423, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9227082085080887, | |
| "grad_norm": 2.8697617053985596, | |
| "learning_rate": 7.420136908157445e-06, | |
| "loss": 0.0444, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.926702616337128, | |
| "grad_norm": 1.9778342247009277, | |
| "learning_rate": 7.405875641756989e-06, | |
| "loss": 0.0392, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9306970241661674, | |
| "grad_norm": 2.4340884685516357, | |
| "learning_rate": 7.391614375356533e-06, | |
| "loss": 0.0388, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9346914319952067, | |
| "grad_norm": 4.2049102783203125, | |
| "learning_rate": 7.377353108956075e-06, | |
| "loss": 0.0494, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.938685839824246, | |
| "grad_norm": 2.0921010971069336, | |
| "learning_rate": 7.3630918425556194e-06, | |
| "loss": 0.0533, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9426802476532854, | |
| "grad_norm": 1.6509953737258911, | |
| "learning_rate": 7.3488305761551635e-06, | |
| "loss": 0.0397, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9466746554823248, | |
| "grad_norm": 1.8162307739257812, | |
| "learning_rate": 7.334569309754707e-06, | |
| "loss": 0.0317, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9506690633113641, | |
| "grad_norm": 2.387477159500122, | |
| "learning_rate": 7.320308043354251e-06, | |
| "loss": 0.0383, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9546634711404034, | |
| "grad_norm": 3.507629632949829, | |
| "learning_rate": 7.306046776953795e-06, | |
| "loss": 0.0488, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.9586578789694428, | |
| "grad_norm": 2.1720080375671387, | |
| "learning_rate": 7.291785510553337e-06, | |
| "loss": 0.0324, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.9626522867984821, | |
| "grad_norm": 3.1510326862335205, | |
| "learning_rate": 7.2775242441528815e-06, | |
| "loss": 0.0418, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.9666466946275215, | |
| "grad_norm": 4.186088562011719, | |
| "learning_rate": 7.2632629777524256e-06, | |
| "loss": 0.0328, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.9706411024565608, | |
| "grad_norm": 3.4489238262176514, | |
| "learning_rate": 7.249001711351969e-06, | |
| "loss": 0.0648, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.9746355102856001, | |
| "grad_norm": 3.5806596279144287, | |
| "learning_rate": 7.234740444951512e-06, | |
| "loss": 0.0594, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.9786299181146395, | |
| "grad_norm": 2.1058568954467773, | |
| "learning_rate": 7.220479178551056e-06, | |
| "loss": 0.0419, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.9826243259436789, | |
| "grad_norm": 4.414669036865234, | |
| "learning_rate": 7.206217912150599e-06, | |
| "loss": 0.0448, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.9866187337727182, | |
| "grad_norm": 2.6314189434051514, | |
| "learning_rate": 7.1919566457501435e-06, | |
| "loss": 0.0367, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.9906131416017575, | |
| "grad_norm": 4.393731594085693, | |
| "learning_rate": 7.1776953793496876e-06, | |
| "loss": 0.0571, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.9946075494307969, | |
| "grad_norm": 2.5744457244873047, | |
| "learning_rate": 7.16343411294923e-06, | |
| "loss": 0.0438, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.9986019572598362, | |
| "grad_norm": 3.193990707397461, | |
| "learning_rate": 7.149172846548774e-06, | |
| "loss": 0.0665, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.0023966446974235, | |
| "grad_norm": 1.0991617441177368, | |
| "learning_rate": 7.134911580148318e-06, | |
| "loss": 0.0287, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.006391052526463, | |
| "grad_norm": 1.510758399963379, | |
| "learning_rate": 7.120650313747861e-06, | |
| "loss": 0.0119, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.0103854603555023, | |
| "grad_norm": 0.9334334135055542, | |
| "learning_rate": 7.1063890473474055e-06, | |
| "loss": 0.0143, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.0143798681845417, | |
| "grad_norm": 1.8770062923431396, | |
| "learning_rate": 7.092127780946949e-06, | |
| "loss": 0.0114, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.018374276013581, | |
| "grad_norm": 2.712904930114746, | |
| "learning_rate": 7.077866514546492e-06, | |
| "loss": 0.0192, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.0223686838426203, | |
| "grad_norm": 1.7017282247543335, | |
| "learning_rate": 7.063605248146036e-06, | |
| "loss": 0.0143, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.0263630916716597, | |
| "grad_norm": 1.6502726078033447, | |
| "learning_rate": 7.04934398174558e-06, | |
| "loss": 0.0139, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.030357499500699, | |
| "grad_norm": 1.7290059328079224, | |
| "learning_rate": 7.035082715345123e-06, | |
| "loss": 0.0157, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.0343519073297385, | |
| "grad_norm": 1.657251238822937, | |
| "learning_rate": 7.020821448944667e-06, | |
| "loss": 0.0175, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.0383463151587777, | |
| "grad_norm": 1.3441667556762695, | |
| "learning_rate": 7.006560182544211e-06, | |
| "loss": 0.0188, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.042340722987817, | |
| "grad_norm": 1.1778837442398071, | |
| "learning_rate": 6.992298916143754e-06, | |
| "loss": 0.0113, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.0463351308168565, | |
| "grad_norm": 1.3368825912475586, | |
| "learning_rate": 6.978037649743298e-06, | |
| "loss": 0.0113, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.0503295386458957, | |
| "grad_norm": 1.137683391571045, | |
| "learning_rate": 6.963776383342841e-06, | |
| "loss": 0.0157, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.054323946474935, | |
| "grad_norm": 1.192594051361084, | |
| "learning_rate": 6.949515116942385e-06, | |
| "loss": 0.0139, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.0583183543039745, | |
| "grad_norm": 1.4994938373565674, | |
| "learning_rate": 6.935253850541929e-06, | |
| "loss": 0.0176, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.0623127621330137, | |
| "grad_norm": 2.461247205734253, | |
| "learning_rate": 6.920992584141473e-06, | |
| "loss": 0.0157, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.0663071699620532, | |
| "grad_norm": 1.8828457593917847, | |
| "learning_rate": 6.906731317741015e-06, | |
| "loss": 0.0128, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.0703015777910925, | |
| "grad_norm": 1.1698198318481445, | |
| "learning_rate": 6.892470051340559e-06, | |
| "loss": 0.016, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.0742959856201317, | |
| "grad_norm": 0.35763826966285706, | |
| "learning_rate": 6.878208784940103e-06, | |
| "loss": 0.0113, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.0782903934491712, | |
| "grad_norm": 2.0067079067230225, | |
| "learning_rate": 6.863947518539647e-06, | |
| "loss": 0.0143, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.0822848012782105, | |
| "grad_norm": 3.237484931945801, | |
| "learning_rate": 6.849686252139191e-06, | |
| "loss": 0.0103, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.08627920910725, | |
| "grad_norm": 1.5877996683120728, | |
| "learning_rate": 6.835424985738735e-06, | |
| "loss": 0.0165, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.0902736169362892, | |
| "grad_norm": 1.694011926651001, | |
| "learning_rate": 6.821163719338277e-06, | |
| "loss": 0.0178, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.0942680247653285, | |
| "grad_norm": 0.9158272743225098, | |
| "learning_rate": 6.806902452937821e-06, | |
| "loss": 0.0122, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.098262432594368, | |
| "grad_norm": 1.727840542793274, | |
| "learning_rate": 6.792641186537365e-06, | |
| "loss": 0.0117, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.1022568404234072, | |
| "grad_norm": 2.3446457386016846, | |
| "learning_rate": 6.778379920136909e-06, | |
| "loss": 0.0149, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.1062512482524465, | |
| "grad_norm": 0.6321650147438049, | |
| "learning_rate": 6.764118653736452e-06, | |
| "loss": 0.0107, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.110245656081486, | |
| "grad_norm": 0.8098093867301941, | |
| "learning_rate": 6.749857387335996e-06, | |
| "loss": 0.0103, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.1142400639105252, | |
| "grad_norm": 0.9799767136573792, | |
| "learning_rate": 6.735596120935539e-06, | |
| "loss": 0.0088, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.1182344717395647, | |
| "grad_norm": 1.495099663734436, | |
| "learning_rate": 6.721334854535083e-06, | |
| "loss": 0.0174, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.122228879568604, | |
| "grad_norm": 0.9526494145393372, | |
| "learning_rate": 6.7070735881346274e-06, | |
| "loss": 0.019, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.1262232873976432, | |
| "grad_norm": 2.276949405670166, | |
| "learning_rate": 6.69281232173417e-06, | |
| "loss": 0.0153, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.1302176952266827, | |
| "grad_norm": 1.0506013631820679, | |
| "learning_rate": 6.678551055333714e-06, | |
| "loss": 0.0126, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.134212103055722, | |
| "grad_norm": 1.4864287376403809, | |
| "learning_rate": 6.664289788933258e-06, | |
| "loss": 0.019, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.1382065108847614, | |
| "grad_norm": 1.2148163318634033, | |
| "learning_rate": 6.650028522532801e-06, | |
| "loss": 0.012, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.1422009187138007, | |
| "grad_norm": 0.8917925953865051, | |
| "learning_rate": 6.635767256132345e-06, | |
| "loss": 0.0107, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.14619532654284, | |
| "grad_norm": 1.3681126832962036, | |
| "learning_rate": 6.621505989731889e-06, | |
| "loss": 0.0136, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.1501897343718794, | |
| "grad_norm": 2.1534805297851562, | |
| "learning_rate": 6.607244723331432e-06, | |
| "loss": 0.0159, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.1541841422009187, | |
| "grad_norm": 3.416405200958252, | |
| "learning_rate": 6.592983456930976e-06, | |
| "loss": 0.0113, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.158178550029958, | |
| "grad_norm": 1.62083101272583, | |
| "learning_rate": 6.57872219053052e-06, | |
| "loss": 0.0166, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.1621729578589974, | |
| "grad_norm": 1.1295747756958008, | |
| "learning_rate": 6.5644609241300625e-06, | |
| "loss": 0.0128, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.1661673656880367, | |
| "grad_norm": 0.9038260579109192, | |
| "learning_rate": 6.5501996577296066e-06, | |
| "loss": 0.0177, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.1701617735170762, | |
| "grad_norm": 2.222172975540161, | |
| "learning_rate": 6.535938391329151e-06, | |
| "loss": 0.0143, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.1741561813461154, | |
| "grad_norm": 3.0676374435424805, | |
| "learning_rate": 6.521677124928694e-06, | |
| "loss": 0.0182, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.1781505891751547, | |
| "grad_norm": 1.815803050994873, | |
| "learning_rate": 6.507415858528238e-06, | |
| "loss": 0.0149, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.1821449970041942, | |
| "grad_norm": 0.9265576601028442, | |
| "learning_rate": 6.493154592127781e-06, | |
| "loss": 0.0109, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.1861394048332334, | |
| "grad_norm": 1.4228824377059937, | |
| "learning_rate": 6.478893325727325e-06, | |
| "loss": 0.0141, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.190133812662273, | |
| "grad_norm": 0.7584865093231201, | |
| "learning_rate": 6.464632059326869e-06, | |
| "loss": 0.013, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.1941282204913122, | |
| "grad_norm": 1.733510136604309, | |
| "learning_rate": 6.450370792926413e-06, | |
| "loss": 0.0157, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.1981226283203514, | |
| "grad_norm": 4.701653003692627, | |
| "learning_rate": 6.436109526525957e-06, | |
| "loss": 0.0196, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.202117036149391, | |
| "grad_norm": 1.6298191547393799, | |
| "learning_rate": 6.421848260125499e-06, | |
| "loss": 0.0154, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.2061114439784302, | |
| "grad_norm": 1.8565294742584229, | |
| "learning_rate": 6.407586993725043e-06, | |
| "loss": 0.0114, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.2101058518074694, | |
| "grad_norm": 1.4582592248916626, | |
| "learning_rate": 6.393325727324587e-06, | |
| "loss": 0.0119, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.214100259636509, | |
| "grad_norm": 1.6364414691925049, | |
| "learning_rate": 6.379064460924131e-06, | |
| "loss": 0.0123, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.2180946674655482, | |
| "grad_norm": 1.888590693473816, | |
| "learning_rate": 6.364803194523675e-06, | |
| "loss": 0.014, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.2220890752945877, | |
| "grad_norm": 1.981140375137329, | |
| "learning_rate": 6.350541928123218e-06, | |
| "loss": 0.0139, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.226083483123627, | |
| "grad_norm": 1.8807272911071777, | |
| "learning_rate": 6.336280661722761e-06, | |
| "loss": 0.0132, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.2300778909526662, | |
| "grad_norm": 2.573253870010376, | |
| "learning_rate": 6.322019395322305e-06, | |
| "loss": 0.0117, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.2340722987817057, | |
| "grad_norm": 3.0794425010681152, | |
| "learning_rate": 6.307758128921849e-06, | |
| "loss": 0.016, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.238066706610745, | |
| "grad_norm": 0.6964554786682129, | |
| "learning_rate": 6.293496862521392e-06, | |
| "loss": 0.0124, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.2420611144397844, | |
| "grad_norm": 2.4325902462005615, | |
| "learning_rate": 6.279235596120936e-06, | |
| "loss": 0.0116, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.2460555222688237, | |
| "grad_norm": 1.6838932037353516, | |
| "learning_rate": 6.26497432972048e-06, | |
| "loss": 0.0136, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.250049930097863, | |
| "grad_norm": 2.2973756790161133, | |
| "learning_rate": 6.250713063320023e-06, | |
| "loss": 0.0107, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.2540443379269024, | |
| "grad_norm": 1.736557126045227, | |
| "learning_rate": 6.236451796919567e-06, | |
| "loss": 0.0107, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.2580387457559417, | |
| "grad_norm": 0.5728934407234192, | |
| "learning_rate": 6.222190530519111e-06, | |
| "loss": 0.0135, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.262033153584981, | |
| "grad_norm": 1.3297735452651978, | |
| "learning_rate": 6.207929264118654e-06, | |
| "loss": 0.011, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.2660275614140204, | |
| "grad_norm": 1.6742457151412964, | |
| "learning_rate": 6.193667997718198e-06, | |
| "loss": 0.0125, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.2700219692430597, | |
| "grad_norm": 1.9207468032836914, | |
| "learning_rate": 6.179406731317742e-06, | |
| "loss": 0.0136, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.2740163770720991, | |
| "grad_norm": 1.9225355386734009, | |
| "learning_rate": 6.165145464917285e-06, | |
| "loss": 0.0112, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.2780107849011384, | |
| "grad_norm": 2.1411452293395996, | |
| "learning_rate": 6.1508841985168285e-06, | |
| "loss": 0.0132, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.2820051927301779, | |
| "grad_norm": 1.7551456689834595, | |
| "learning_rate": 6.136622932116373e-06, | |
| "loss": 0.0184, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.2859996005592171, | |
| "grad_norm": 0.43378517031669617, | |
| "learning_rate": 6.122361665715916e-06, | |
| "loss": 0.0158, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.2899940083882564, | |
| "grad_norm": 1.7864091396331787, | |
| "learning_rate": 6.10810039931546e-06, | |
| "loss": 0.0167, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.2939884162172959, | |
| "grad_norm": 0.7298623323440552, | |
| "learning_rate": 6.093839132915004e-06, | |
| "loss": 0.0089, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.2979828240463351, | |
| "grad_norm": 2.0681302547454834, | |
| "learning_rate": 6.0795778665145464e-06, | |
| "loss": 0.0162, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.3019772318753744, | |
| "grad_norm": 0.9076918363571167, | |
| "learning_rate": 6.0653166001140905e-06, | |
| "loss": 0.0134, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.3059716397044139, | |
| "grad_norm": 1.121147871017456, | |
| "learning_rate": 6.051055333713635e-06, | |
| "loss": 0.014, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.3099660475334531, | |
| "grad_norm": 1.6624290943145752, | |
| "learning_rate": 6.036794067313178e-06, | |
| "loss": 0.0119, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.3139604553624924, | |
| "grad_norm": 2.1707956790924072, | |
| "learning_rate": 6.022532800912721e-06, | |
| "loss": 0.0147, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.3179548631915319, | |
| "grad_norm": 2.256680488586426, | |
| "learning_rate": 6.008271534512265e-06, | |
| "loss": 0.0152, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.3219492710205711, | |
| "grad_norm": 1.446263074874878, | |
| "learning_rate": 5.9940102681118085e-06, | |
| "loss": 0.011, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.3259436788496106, | |
| "grad_norm": 1.5500271320343018, | |
| "learning_rate": 5.9797490017113525e-06, | |
| "loss": 0.0145, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.3299380866786499, | |
| "grad_norm": 3.1068994998931885, | |
| "learning_rate": 5.965487735310897e-06, | |
| "loss": 0.0111, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.3339324945076894, | |
| "grad_norm": 1.4944040775299072, | |
| "learning_rate": 5.951226468910439e-06, | |
| "loss": 0.0126, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.3379269023367286, | |
| "grad_norm": 1.89198637008667, | |
| "learning_rate": 5.936965202509983e-06, | |
| "loss": 0.0117, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.3419213101657679, | |
| "grad_norm": 0.9884303212165833, | |
| "learning_rate": 5.922703936109527e-06, | |
| "loss": 0.0103, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.3459157179948074, | |
| "grad_norm": 1.8343956470489502, | |
| "learning_rate": 5.9084426697090705e-06, | |
| "loss": 0.0121, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.3499101258238466, | |
| "grad_norm": 2.7172069549560547, | |
| "learning_rate": 5.8941814033086146e-06, | |
| "loss": 0.0123, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.3539045336528859, | |
| "grad_norm": 2.1298084259033203, | |
| "learning_rate": 5.879920136908158e-06, | |
| "loss": 0.0112, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.3578989414819254, | |
| "grad_norm": 2.0694286823272705, | |
| "learning_rate": 5.865658870507701e-06, | |
| "loss": 0.0132, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.3618933493109646, | |
| "grad_norm": 2.3463828563690186, | |
| "learning_rate": 5.851397604107245e-06, | |
| "loss": 0.013, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.3658877571400039, | |
| "grad_norm": 0.793806254863739, | |
| "learning_rate": 5.837136337706789e-06, | |
| "loss": 0.0121, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.3698821649690434, | |
| "grad_norm": 2.076143264770508, | |
| "learning_rate": 5.822875071306332e-06, | |
| "loss": 0.01, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.3738765727980826, | |
| "grad_norm": 1.482211709022522, | |
| "learning_rate": 5.808613804905876e-06, | |
| "loss": 0.0141, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.377870980627122, | |
| "grad_norm": 2.222541093826294, | |
| "learning_rate": 5.79435253850542e-06, | |
| "loss": 0.0131, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.3818653884561614, | |
| "grad_norm": 1.325785517692566, | |
| "learning_rate": 5.780091272104963e-06, | |
| "loss": 0.013, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.3858597962852008, | |
| "grad_norm": 2.414950132369995, | |
| "learning_rate": 5.765830005704507e-06, | |
| "loss": 0.0112, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.38985420411424, | |
| "grad_norm": 2.191398859024048, | |
| "learning_rate": 5.751568739304051e-06, | |
| "loss": 0.0108, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.3938486119432794, | |
| "grad_norm": 0.8840504884719849, | |
| "learning_rate": 5.737307472903594e-06, | |
| "loss": 0.0075, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.3978430197723188, | |
| "grad_norm": 0.669025719165802, | |
| "learning_rate": 5.723046206503138e-06, | |
| "loss": 0.0126, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.401837427601358, | |
| "grad_norm": 1.5776950120925903, | |
| "learning_rate": 5.708784940102682e-06, | |
| "loss": 0.0156, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.4058318354303974, | |
| "grad_norm": 2.344930410385132, | |
| "learning_rate": 5.694523673702225e-06, | |
| "loss": 0.0145, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.4098262432594368, | |
| "grad_norm": 0.4023391902446747, | |
| "learning_rate": 5.680262407301768e-06, | |
| "loss": 0.0137, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.413820651088476, | |
| "grad_norm": 2.0755484104156494, | |
| "learning_rate": 5.6660011409013125e-06, | |
| "loss": 0.0154, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.4178150589175154, | |
| "grad_norm": 1.70918869972229, | |
| "learning_rate": 5.651739874500856e-06, | |
| "loss": 0.0115, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.4218094667465548, | |
| "grad_norm": 1.3769606351852417, | |
| "learning_rate": 5.6374786081004e-06, | |
| "loss": 0.0141, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.425803874575594, | |
| "grad_norm": 2.3350486755371094, | |
| "learning_rate": 5.623217341699944e-06, | |
| "loss": 0.0107, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.4297982824046336, | |
| "grad_norm": 2.62070894241333, | |
| "learning_rate": 5.608956075299488e-06, | |
| "loss": 0.0125, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.4337926902336728, | |
| "grad_norm": 2.3193471431732178, | |
| "learning_rate": 5.59469480889903e-06, | |
| "loss": 0.0103, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.4377870980627123, | |
| "grad_norm": 3.429668426513672, | |
| "learning_rate": 5.5804335424985745e-06, | |
| "loss": 0.0177, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.4417815058917516, | |
| "grad_norm": 1.2763311862945557, | |
| "learning_rate": 5.566172276098119e-06, | |
| "loss": 0.0139, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.4457759137207908, | |
| "grad_norm": 1.1153768301010132, | |
| "learning_rate": 5.551911009697661e-06, | |
| "loss": 0.0086, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.4497703215498303, | |
| "grad_norm": 2.515153646469116, | |
| "learning_rate": 5.537649743297205e-06, | |
| "loss": 0.0138, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.4537647293788696, | |
| "grad_norm": 1.5183403491973877, | |
| "learning_rate": 5.523388476896749e-06, | |
| "loss": 0.0135, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.4577591372079088, | |
| "grad_norm": 1.2702558040618896, | |
| "learning_rate": 5.509127210496292e-06, | |
| "loss": 0.0118, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.4617535450369483, | |
| "grad_norm": 1.1038216352462769, | |
| "learning_rate": 5.4948659440958365e-06, | |
| "loss": 0.0132, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.4657479528659876, | |
| "grad_norm": 1.4590846300125122, | |
| "learning_rate": 5.480604677695381e-06, | |
| "loss": 0.0096, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.4697423606950268, | |
| "grad_norm": 0.9967065453529358, | |
| "learning_rate": 5.466343411294923e-06, | |
| "loss": 0.0094, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.4737367685240663, | |
| "grad_norm": 2.1992242336273193, | |
| "learning_rate": 5.452082144894467e-06, | |
| "loss": 0.0112, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.4777311763531056, | |
| "grad_norm": 2.9212288856506348, | |
| "learning_rate": 5.437820878494011e-06, | |
| "loss": 0.0122, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.481725584182145, | |
| "grad_norm": 1.6093804836273193, | |
| "learning_rate": 5.4235596120935544e-06, | |
| "loss": 0.0098, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.4857199920111843, | |
| "grad_norm": 2.4155471324920654, | |
| "learning_rate": 5.409298345693098e-06, | |
| "loss": 0.0156, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.4897143998402238, | |
| "grad_norm": 1.4361823797225952, | |
| "learning_rate": 5.395037079292642e-06, | |
| "loss": 0.0124, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.493708807669263, | |
| "grad_norm": 1.1570395231246948, | |
| "learning_rate": 5.380775812892185e-06, | |
| "loss": 0.0073, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.4977032154983023, | |
| "grad_norm": 1.300065040588379, | |
| "learning_rate": 5.366514546491729e-06, | |
| "loss": 0.0094, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.5016976233273418, | |
| "grad_norm": 2.148361921310425, | |
| "learning_rate": 5.352253280091273e-06, | |
| "loss": 0.014, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.505692031156381, | |
| "grad_norm": 0.6852880716323853, | |
| "learning_rate": 5.337992013690816e-06, | |
| "loss": 0.0143, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.5096864389854203, | |
| "grad_norm": 1.4893112182617188, | |
| "learning_rate": 5.32373074729036e-06, | |
| "loss": 0.0126, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.5136808468144598, | |
| "grad_norm": 2.3349452018737793, | |
| "learning_rate": 5.309469480889904e-06, | |
| "loss": 0.0103, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.517675254643499, | |
| "grad_norm": 0.9701346755027771, | |
| "learning_rate": 5.295208214489447e-06, | |
| "loss": 0.0104, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.5216696624725383, | |
| "grad_norm": 1.6349523067474365, | |
| "learning_rate": 5.280946948088991e-06, | |
| "loss": 0.0125, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.5256640703015778, | |
| "grad_norm": 2.166870355606079, | |
| "learning_rate": 5.266685681688534e-06, | |
| "loss": 0.0171, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.5296584781306173, | |
| "grad_norm": 0.6497751474380493, | |
| "learning_rate": 5.252424415288078e-06, | |
| "loss": 0.013, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.5336528859596563, | |
| "grad_norm": 2.074389696121216, | |
| "learning_rate": 5.238163148887622e-06, | |
| "loss": 0.0168, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.5376472937886958, | |
| "grad_norm": 1.5809192657470703, | |
| "learning_rate": 5.223901882487166e-06, | |
| "loss": 0.0121, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.5416417016177353, | |
| "grad_norm": 1.357330083847046, | |
| "learning_rate": 5.209640616086708e-06, | |
| "loss": 0.0115, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.5456361094467745, | |
| "grad_norm": 1.0214003324508667, | |
| "learning_rate": 5.195379349686252e-06, | |
| "loss": 0.0118, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.5496305172758138, | |
| "grad_norm": 1.156663417816162, | |
| "learning_rate": 5.1811180832857964e-06, | |
| "loss": 0.0134, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.5536249251048533, | |
| "grad_norm": 1.151166558265686, | |
| "learning_rate": 5.16685681688534e-06, | |
| "loss": 0.0134, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.5576193329338925, | |
| "grad_norm": 1.5714129209518433, | |
| "learning_rate": 5.152595550484884e-06, | |
| "loss": 0.0157, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.5616137407629318, | |
| "grad_norm": 2.01296329498291, | |
| "learning_rate": 5.138334284084428e-06, | |
| "loss": 0.0151, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.5656081485919713, | |
| "grad_norm": 1.7755038738250732, | |
| "learning_rate": 5.12407301768397e-06, | |
| "loss": 0.0097, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.5696025564210105, | |
| "grad_norm": 1.5821765661239624, | |
| "learning_rate": 5.109811751283514e-06, | |
| "loss": 0.0171, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.5735969642500498, | |
| "grad_norm": 2.132004976272583, | |
| "learning_rate": 5.0955504848830584e-06, | |
| "loss": 0.009, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.5775913720790893, | |
| "grad_norm": 0.8342037796974182, | |
| "learning_rate": 5.081289218482601e-06, | |
| "loss": 0.0122, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.5815857799081288, | |
| "grad_norm": 0.9015669226646423, | |
| "learning_rate": 5.067027952082145e-06, | |
| "loss": 0.0123, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.5855801877371678, | |
| "grad_norm": 1.3912228345870972, | |
| "learning_rate": 5.052766685681689e-06, | |
| "loss": 0.0111, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.5895745955662073, | |
| "grad_norm": 1.776029109954834, | |
| "learning_rate": 5.038505419281232e-06, | |
| "loss": 0.0104, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.5935690033952468, | |
| "grad_norm": 1.5565587282180786, | |
| "learning_rate": 5.024244152880776e-06, | |
| "loss": 0.0156, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "grad_norm": 1.2180767059326172, | |
| "learning_rate": 5.0099828864803205e-06, | |
| "loss": 0.0106, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "eval_loss": 0.014898120425641537, | |
| "eval_runtime": 7505.9552, | |
| "eval_samples_per_second": 2.668, | |
| "eval_steps_per_second": 0.334, | |
| "eval_wer": 1.2347834627409684, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.6015578190533253, | |
| "grad_norm": 2.9766318798065186, | |
| "learning_rate": 4.995721620079864e-06, | |
| "loss": 0.0176, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.6055522268823648, | |
| "grad_norm": 1.588296890258789, | |
| "learning_rate": 4.981460353679407e-06, | |
| "loss": 0.0114, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.609546634711404, | |
| "grad_norm": 0.9115377068519592, | |
| "learning_rate": 4.96719908727895e-06, | |
| "loss": 0.0098, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.6135410425404433, | |
| "grad_norm": 1.0077248811721802, | |
| "learning_rate": 4.952937820878494e-06, | |
| "loss": 0.0079, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.6175354503694828, | |
| "grad_norm": 2.668668031692505, | |
| "learning_rate": 4.9386765544780376e-06, | |
| "loss": 0.0128, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.621529858198522, | |
| "grad_norm": 2.5450539588928223, | |
| "learning_rate": 4.924415288077582e-06, | |
| "loss": 0.015, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.6255242660275613, | |
| "grad_norm": 1.6117314100265503, | |
| "learning_rate": 4.910154021677126e-06, | |
| "loss": 0.0098, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.6295186738566008, | |
| "grad_norm": 1.3903907537460327, | |
| "learning_rate": 4.895892755276669e-06, | |
| "loss": 0.0103, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.6335130816856402, | |
| "grad_norm": 2.082406997680664, | |
| "learning_rate": 4.881631488876213e-06, | |
| "loss": 0.0138, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.6375074895146795, | |
| "grad_norm": 2.9802393913269043, | |
| "learning_rate": 4.867370222475756e-06, | |
| "loss": 0.0162, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.6415018973437188, | |
| "grad_norm": 0.9118614792823792, | |
| "learning_rate": 4.8531089560753e-06, | |
| "loss": 0.0175, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.6454963051727582, | |
| "grad_norm": 2.3443613052368164, | |
| "learning_rate": 4.838847689674844e-06, | |
| "loss": 0.013, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.6494907130017975, | |
| "grad_norm": 1.987385630607605, | |
| "learning_rate": 4.824586423274387e-06, | |
| "loss": 0.0086, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.6534851208308368, | |
| "grad_norm": 1.1624454259872437, | |
| "learning_rate": 4.810325156873931e-06, | |
| "loss": 0.0148, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.6574795286598762, | |
| "grad_norm": 2.598879814147949, | |
| "learning_rate": 4.796063890473474e-06, | |
| "loss": 0.0106, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.6614739364889155, | |
| "grad_norm": 1.3221343755722046, | |
| "learning_rate": 4.781802624073018e-06, | |
| "loss": 0.0092, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.6654683443179548, | |
| "grad_norm": 2.23791766166687, | |
| "learning_rate": 4.767541357672562e-06, | |
| "loss": 0.0135, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.6694627521469942, | |
| "grad_norm": 2.4781031608581543, | |
| "learning_rate": 4.753280091272106e-06, | |
| "loss": 0.0122, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.6734571599760335, | |
| "grad_norm": 1.3030191659927368, | |
| "learning_rate": 4.739018824871649e-06, | |
| "loss": 0.0115, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.6774515678050728, | |
| "grad_norm": 1.5805338621139526, | |
| "learning_rate": 4.724757558471192e-06, | |
| "loss": 0.0105, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.6814459756341122, | |
| "grad_norm": 1.542032241821289, | |
| "learning_rate": 4.710496292070736e-06, | |
| "loss": 0.0098, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.6854403834631517, | |
| "grad_norm": 2.8560240268707275, | |
| "learning_rate": 4.6962350256702795e-06, | |
| "loss": 0.0119, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.689434791292191, | |
| "grad_norm": 3.2016873359680176, | |
| "learning_rate": 4.681973759269824e-06, | |
| "loss": 0.0149, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.6934291991212302, | |
| "grad_norm": 1.2048195600509644, | |
| "learning_rate": 4.667712492869368e-06, | |
| "loss": 0.0128, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.6974236069502697, | |
| "grad_norm": 2.8393335342407227, | |
| "learning_rate": 4.653451226468911e-06, | |
| "loss": 0.0164, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.701418014779309, | |
| "grad_norm": 1.800487756729126, | |
| "learning_rate": 4.639189960068454e-06, | |
| "loss": 0.0112, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.7054124226083482, | |
| "grad_norm": 2.834336996078491, | |
| "learning_rate": 4.624928693667998e-06, | |
| "loss": 0.0115, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.7094068304373877, | |
| "grad_norm": 1.4550248384475708, | |
| "learning_rate": 4.6106674272675416e-06, | |
| "loss": 0.0139, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.713401238266427, | |
| "grad_norm": 1.9578481912612915, | |
| "learning_rate": 4.596406160867085e-06, | |
| "loss": 0.0123, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.7173956460954662, | |
| "grad_norm": 1.880245566368103, | |
| "learning_rate": 4.582144894466629e-06, | |
| "loss": 0.0133, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.7213900539245057, | |
| "grad_norm": 2.5412514209747314, | |
| "learning_rate": 4.567883628066173e-06, | |
| "loss": 0.0188, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.725384461753545, | |
| "grad_norm": 0.7581049799919128, | |
| "learning_rate": 4.553622361665716e-06, | |
| "loss": 0.0093, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.7293788695825842, | |
| "grad_norm": 2.3450958728790283, | |
| "learning_rate": 4.53936109526526e-06, | |
| "loss": 0.0126, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.7333732774116237, | |
| "grad_norm": 2.2938358783721924, | |
| "learning_rate": 4.525099828864804e-06, | |
| "loss": 0.0123, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.7373676852406632, | |
| "grad_norm": 1.1946285963058472, | |
| "learning_rate": 4.510838562464347e-06, | |
| "loss": 0.0189, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.7413620930697025, | |
| "grad_norm": 1.1202322244644165, | |
| "learning_rate": 4.496577296063891e-06, | |
| "loss": 0.0138, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.7453565008987417, | |
| "grad_norm": 1.9504307508468628, | |
| "learning_rate": 4.482316029663434e-06, | |
| "loss": 0.0093, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.7493509087277812, | |
| "grad_norm": 4.9356584548950195, | |
| "learning_rate": 4.468054763262978e-06, | |
| "loss": 0.0102, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.7533453165568205, | |
| "grad_norm": 1.5076123476028442, | |
| "learning_rate": 4.4537934968625215e-06, | |
| "loss": 0.0121, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.7573397243858597, | |
| "grad_norm": 0.6758047342300415, | |
| "learning_rate": 4.439532230462066e-06, | |
| "loss": 0.0107, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.7613341322148992, | |
| "grad_norm": 2.7167506217956543, | |
| "learning_rate": 4.425270964061609e-06, | |
| "loss": 0.0147, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.7653285400439385, | |
| "grad_norm": 2.033478260040283, | |
| "learning_rate": 4.411009697661153e-06, | |
| "loss": 0.0169, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.7693229478729777, | |
| "grad_norm": 1.155606985092163, | |
| "learning_rate": 4.396748431260696e-06, | |
| "loss": 0.009, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.7733173557020172, | |
| "grad_norm": 1.7717381715774536, | |
| "learning_rate": 4.38248716486024e-06, | |
| "loss": 0.0116, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.7773117635310565, | |
| "grad_norm": 3.297030448913574, | |
| "learning_rate": 4.3682258984597835e-06, | |
| "loss": 0.0123, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.7813061713600957, | |
| "grad_norm": 2.9757895469665527, | |
| "learning_rate": 4.353964632059327e-06, | |
| "loss": 0.0177, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.7853005791891352, | |
| "grad_norm": 1.0998330116271973, | |
| "learning_rate": 4.339703365658871e-06, | |
| "loss": 0.0115, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.7892949870181747, | |
| "grad_norm": 1.034776210784912, | |
| "learning_rate": 4.325442099258414e-06, | |
| "loss": 0.0107, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.793289394847214, | |
| "grad_norm": 2.344693899154663, | |
| "learning_rate": 4.311180832857958e-06, | |
| "loss": 0.0168, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.7972838026762532, | |
| "grad_norm": 0.8742194771766663, | |
| "learning_rate": 4.296919566457502e-06, | |
| "loss": 0.0139, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.8012782105052927, | |
| "grad_norm": 1.9352366924285889, | |
| "learning_rate": 4.2826583000570456e-06, | |
| "loss": 0.0165, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.805272618334332, | |
| "grad_norm": 1.127448320388794, | |
| "learning_rate": 4.268397033656589e-06, | |
| "loss": 0.0122, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.8092670261633712, | |
| "grad_norm": 1.8520009517669678, | |
| "learning_rate": 4.254135767256133e-06, | |
| "loss": 0.017, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.8132614339924107, | |
| "grad_norm": 1.1415423154830933, | |
| "learning_rate": 4.239874500855676e-06, | |
| "loss": 0.0136, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.81725584182145, | |
| "grad_norm": 1.7549067735671997, | |
| "learning_rate": 4.225613234455219e-06, | |
| "loss": 0.0109, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.8212502496504892, | |
| "grad_norm": 2.363199234008789, | |
| "learning_rate": 4.2113519680547635e-06, | |
| "loss": 0.0144, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.8252446574795287, | |
| "grad_norm": 1.7024168968200684, | |
| "learning_rate": 4.197090701654308e-06, | |
| "loss": 0.0117, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.8292390653085682, | |
| "grad_norm": 1.9876381158828735, | |
| "learning_rate": 4.182829435253851e-06, | |
| "loss": 0.0148, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.8332334731376072, | |
| "grad_norm": 1.2069517374038696, | |
| "learning_rate": 4.168568168853395e-06, | |
| "loss": 0.0083, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.8372278809666467, | |
| "grad_norm": 1.4075438976287842, | |
| "learning_rate": 4.154306902452938e-06, | |
| "loss": 0.0129, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.8412222887956862, | |
| "grad_norm": 1.8925701379776, | |
| "learning_rate": 4.1400456360524814e-06, | |
| "loss": 0.0094, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.8452166966247254, | |
| "grad_norm": 1.1471129655838013, | |
| "learning_rate": 4.1257843696520255e-06, | |
| "loss": 0.0093, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.8492111044537647, | |
| "grad_norm": 2.0031282901763916, | |
| "learning_rate": 4.111523103251569e-06, | |
| "loss": 0.0088, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.8532055122828042, | |
| "grad_norm": 2.467355251312256, | |
| "learning_rate": 4.097261836851113e-06, | |
| "loss": 0.0139, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.8571999201118434, | |
| "grad_norm": 0.6703509092330933, | |
| "learning_rate": 4.083000570450656e-06, | |
| "loss": 0.0132, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.8611943279408827, | |
| "grad_norm": 2.917024612426758, | |
| "learning_rate": 4.0687393040502e-06, | |
| "loss": 0.0116, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.8651887357699222, | |
| "grad_norm": 0.7198773622512817, | |
| "learning_rate": 4.054478037649744e-06, | |
| "loss": 0.0109, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.8691831435989614, | |
| "grad_norm": 5.306532859802246, | |
| "learning_rate": 4.0402167712492876e-06, | |
| "loss": 0.0164, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.8731775514280007, | |
| "grad_norm": 1.05637526512146, | |
| "learning_rate": 4.025955504848831e-06, | |
| "loss": 0.0152, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.8771719592570402, | |
| "grad_norm": 1.5791622400283813, | |
| "learning_rate": 4.011694238448375e-06, | |
| "loss": 0.0071, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.8811663670860796, | |
| "grad_norm": 1.7619889974594116, | |
| "learning_rate": 3.997432972047918e-06, | |
| "loss": 0.0112, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.8851607749151187, | |
| "grad_norm": 3.435976266860962, | |
| "learning_rate": 3.983171705647461e-06, | |
| "loss": 0.014, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.8891551827441582, | |
| "grad_norm": 2.0465235710144043, | |
| "learning_rate": 3.9689104392470055e-06, | |
| "loss": 0.0079, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.8931495905731976, | |
| "grad_norm": 1.5081980228424072, | |
| "learning_rate": 3.954649172846549e-06, | |
| "loss": 0.0091, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.897143998402237, | |
| "grad_norm": 1.919283151626587, | |
| "learning_rate": 3.940387906446093e-06, | |
| "loss": 0.0213, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.9011384062312762, | |
| "grad_norm": 4.3748064041137695, | |
| "learning_rate": 3.926126640045637e-06, | |
| "loss": 0.0181, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.9051328140603156, | |
| "grad_norm": 1.2997761964797974, | |
| "learning_rate": 3.91186537364518e-06, | |
| "loss": 0.0085, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.909127221889355, | |
| "grad_norm": 2.0615782737731934, | |
| "learning_rate": 3.897604107244723e-06, | |
| "loss": 0.0127, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.9131216297183942, | |
| "grad_norm": 2.5756430625915527, | |
| "learning_rate": 3.8833428408442675e-06, | |
| "loss": 0.0119, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.9171160375474336, | |
| "grad_norm": 2.98134446144104, | |
| "learning_rate": 3.869081574443811e-06, | |
| "loss": 0.014, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.921110445376473, | |
| "grad_norm": 1.2567311525344849, | |
| "learning_rate": 3.854820308043354e-06, | |
| "loss": 0.015, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.9251048532055122, | |
| "grad_norm": 3.069068670272827, | |
| "learning_rate": 3.840559041642898e-06, | |
| "loss": 0.0187, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.9290992610345516, | |
| "grad_norm": 1.957788348197937, | |
| "learning_rate": 3.826297775242442e-06, | |
| "loss": 0.0085, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.9330936688635911, | |
| "grad_norm": 2.6812667846679688, | |
| "learning_rate": 3.8120365088419854e-06, | |
| "loss": 0.0098, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.9370880766926302, | |
| "grad_norm": 1.6739838123321533, | |
| "learning_rate": 3.797775242441529e-06, | |
| "loss": 0.0102, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.9410824845216696, | |
| "grad_norm": 0.818157434463501, | |
| "learning_rate": 3.7835139760410728e-06, | |
| "loss": 0.0076, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.9450768923507091, | |
| "grad_norm": 1.8288235664367676, | |
| "learning_rate": 3.769252709640616e-06, | |
| "loss": 0.0156, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.9490713001797484, | |
| "grad_norm": 3.466181516647339, | |
| "learning_rate": 3.75499144324016e-06, | |
| "loss": 0.0107, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.9530657080087876, | |
| "grad_norm": 1.228251338005066, | |
| "learning_rate": 3.740730176839704e-06, | |
| "loss": 0.0094, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.9570601158378271, | |
| "grad_norm": 2.779387950897217, | |
| "learning_rate": 3.726468910439247e-06, | |
| "loss": 0.0107, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.9610545236668664, | |
| "grad_norm": 1.5868489742279053, | |
| "learning_rate": 3.712207644038791e-06, | |
| "loss": 0.0113, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.9650489314959056, | |
| "grad_norm": 1.5995168685913086, | |
| "learning_rate": 3.6979463776383344e-06, | |
| "loss": 0.0085, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.9690433393249451, | |
| "grad_norm": 3.000091791152954, | |
| "learning_rate": 3.683685111237878e-06, | |
| "loss": 0.0116, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.9730377471539844, | |
| "grad_norm": 2.185147762298584, | |
| "learning_rate": 3.669423844837422e-06, | |
| "loss": 0.0079, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.9770321549830236, | |
| "grad_norm": 0.5639151334762573, | |
| "learning_rate": 3.6551625784369654e-06, | |
| "loss": 0.0116, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.9810265628120631, | |
| "grad_norm": 0.49505335092544556, | |
| "learning_rate": 3.640901312036509e-06, | |
| "loss": 0.008, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.9850209706411026, | |
| "grad_norm": 0.9688293933868408, | |
| "learning_rate": 3.6266400456360527e-06, | |
| "loss": 0.0118, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.9890153784701416, | |
| "grad_norm": 1.2199223041534424, | |
| "learning_rate": 3.6123787792355964e-06, | |
| "loss": 0.0113, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.9930097862991811, | |
| "grad_norm": 2.1835548877716064, | |
| "learning_rate": 3.5981175128351405e-06, | |
| "loss": 0.0109, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.9970041941282206, | |
| "grad_norm": 1.8946824073791504, | |
| "learning_rate": 3.5838562464346838e-06, | |
| "loss": 0.0111, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.000798881565808, | |
| "grad_norm": 0.641574501991272, | |
| "learning_rate": 3.5695949800342274e-06, | |
| "loss": 0.0062, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.004793289394847, | |
| "grad_norm": 0.33183345198631287, | |
| "learning_rate": 3.555333713633771e-06, | |
| "loss": 0.0023, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.0087876972238865, | |
| "grad_norm": 1.069380283355713, | |
| "learning_rate": 3.5410724472333148e-06, | |
| "loss": 0.0031, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.012782105052926, | |
| "grad_norm": 0.10800563544034958, | |
| "learning_rate": 3.526811180832858e-06, | |
| "loss": 0.0051, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.0167765128819655, | |
| "grad_norm": 0.5448017716407776, | |
| "learning_rate": 3.512549914432402e-06, | |
| "loss": 0.0027, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.0207709207110045, | |
| "grad_norm": 3.1985340118408203, | |
| "learning_rate": 3.4982886480319454e-06, | |
| "loss": 0.0039, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.024765328540044, | |
| "grad_norm": 1.5143511295318604, | |
| "learning_rate": 3.484027381631489e-06, | |
| "loss": 0.0036, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.0287597363690835, | |
| "grad_norm": 1.195678472518921, | |
| "learning_rate": 3.469766115231033e-06, | |
| "loss": 0.0072, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.0327541441981225, | |
| "grad_norm": 0.6552492380142212, | |
| "learning_rate": 3.4555048488305764e-06, | |
| "loss": 0.0019, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.036748552027162, | |
| "grad_norm": 0.4772717356681824, | |
| "learning_rate": 3.44124358243012e-06, | |
| "loss": 0.002, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.0407429598562015, | |
| "grad_norm": 0.20249614119529724, | |
| "learning_rate": 3.4269823160296637e-06, | |
| "loss": 0.0029, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.0447373676852405, | |
| "grad_norm": 1.4228593111038208, | |
| "learning_rate": 3.4127210496292074e-06, | |
| "loss": 0.0034, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.04873177551428, | |
| "grad_norm": 0.5670227408409119, | |
| "learning_rate": 3.3984597832287506e-06, | |
| "loss": 0.0022, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.0527261833433195, | |
| "grad_norm": 0.36346498131752014, | |
| "learning_rate": 3.3841985168282947e-06, | |
| "loss": 0.0038, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.0567205911723585, | |
| "grad_norm": 1.5020396709442139, | |
| "learning_rate": 3.3699372504278384e-06, | |
| "loss": 0.0033, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.060714999001398, | |
| "grad_norm": 1.0543062686920166, | |
| "learning_rate": 3.3556759840273816e-06, | |
| "loss": 0.0034, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.0647094068304375, | |
| "grad_norm": 0.19700485467910767, | |
| "learning_rate": 3.3414147176269257e-06, | |
| "loss": 0.0027, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.068703814659477, | |
| "grad_norm": 0.7679917216300964, | |
| "learning_rate": 3.327153451226469e-06, | |
| "loss": 0.0056, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.072698222488516, | |
| "grad_norm": 0.3091678321361542, | |
| "learning_rate": 3.3128921848260127e-06, | |
| "loss": 0.0023, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.0766926303175555, | |
| "grad_norm": 0.8351314067840576, | |
| "learning_rate": 3.2986309184255568e-06, | |
| "loss": 0.0038, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.080687038146595, | |
| "grad_norm": 0.4689249098300934, | |
| "learning_rate": 3.2843696520251e-06, | |
| "loss": 0.0047, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.084681445975634, | |
| "grad_norm": 1.582379698753357, | |
| "learning_rate": 3.2701083856246437e-06, | |
| "loss": 0.003, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.0886758538046735, | |
| "grad_norm": 0.4387413561344147, | |
| "learning_rate": 3.2558471192241873e-06, | |
| "loss": 0.0027, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.092670261633713, | |
| "grad_norm": 1.4019826650619507, | |
| "learning_rate": 3.241585852823731e-06, | |
| "loss": 0.0019, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.096664669462752, | |
| "grad_norm": 0.219980388879776, | |
| "learning_rate": 3.2273245864232743e-06, | |
| "loss": 0.0046, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.1006590772917915, | |
| "grad_norm": 1.19594407081604, | |
| "learning_rate": 3.2130633200228184e-06, | |
| "loss": 0.0056, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.104653485120831, | |
| "grad_norm": 0.3207838237285614, | |
| "learning_rate": 3.198802053622362e-06, | |
| "loss": 0.0016, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.10864789294987, | |
| "grad_norm": 0.193267360329628, | |
| "learning_rate": 3.1845407872219053e-06, | |
| "loss": 0.0025, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.1126423007789095, | |
| "grad_norm": 0.967071533203125, | |
| "learning_rate": 3.1702795208214494e-06, | |
| "loss": 0.0058, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.116636708607949, | |
| "grad_norm": 0.24417193233966827, | |
| "learning_rate": 3.1560182544209926e-06, | |
| "loss": 0.0038, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.1206311164369884, | |
| "grad_norm": 0.5754942893981934, | |
| "learning_rate": 3.1417569880205367e-06, | |
| "loss": 0.0032, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.1246255242660275, | |
| "grad_norm": 0.490962415933609, | |
| "learning_rate": 3.1274957216200804e-06, | |
| "loss": 0.0041, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.128619932095067, | |
| "grad_norm": 1.6758365631103516, | |
| "learning_rate": 3.1132344552196236e-06, | |
| "loss": 0.0034, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.1326143399241064, | |
| "grad_norm": 0.32650962471961975, | |
| "learning_rate": 3.0989731888191677e-06, | |
| "loss": 0.0026, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.1366087477531455, | |
| "grad_norm": 0.4390106499195099, | |
| "learning_rate": 3.084711922418711e-06, | |
| "loss": 0.0031, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.140603155582185, | |
| "grad_norm": 0.29730117321014404, | |
| "learning_rate": 3.0704506560182546e-06, | |
| "loss": 0.0031, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.1445975634112244, | |
| "grad_norm": 1.7548117637634277, | |
| "learning_rate": 3.0561893896177987e-06, | |
| "loss": 0.0056, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.1485919712402635, | |
| "grad_norm": 0.4591972231864929, | |
| "learning_rate": 3.041928123217342e-06, | |
| "loss": 0.0033, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.152586379069303, | |
| "grad_norm": 1.1678667068481445, | |
| "learning_rate": 3.0276668568168852e-06, | |
| "loss": 0.0021, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.1565807868983424, | |
| "grad_norm": 0.4509355127811432, | |
| "learning_rate": 3.0134055904164293e-06, | |
| "loss": 0.0051, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.1605751947273815, | |
| "grad_norm": 0.2729663848876953, | |
| "learning_rate": 2.999144324015973e-06, | |
| "loss": 0.0025, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.164569602556421, | |
| "grad_norm": 0.8901238441467285, | |
| "learning_rate": 2.9848830576155162e-06, | |
| "loss": 0.0026, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.1685640103854604, | |
| "grad_norm": 0.44139617681503296, | |
| "learning_rate": 2.9706217912150603e-06, | |
| "loss": 0.0029, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.1725584182145, | |
| "grad_norm": 0.25581488013267517, | |
| "learning_rate": 2.9563605248146036e-06, | |
| "loss": 0.0055, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.176552826043539, | |
| "grad_norm": 0.28812941908836365, | |
| "learning_rate": 2.9420992584141473e-06, | |
| "loss": 0.0033, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.1805472338725784, | |
| "grad_norm": 0.14822331070899963, | |
| "learning_rate": 2.9278379920136913e-06, | |
| "loss": 0.0046, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.184541641701618, | |
| "grad_norm": 0.3201834559440613, | |
| "learning_rate": 2.9135767256132346e-06, | |
| "loss": 0.0019, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.188536049530657, | |
| "grad_norm": 0.5203161239624023, | |
| "learning_rate": 2.8993154592127783e-06, | |
| "loss": 0.0028, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.1925304573596964, | |
| "grad_norm": 0.28744950890541077, | |
| "learning_rate": 2.885054192812322e-06, | |
| "loss": 0.0021, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.196524865188736, | |
| "grad_norm": 0.34452763199806213, | |
| "learning_rate": 2.8707929264118656e-06, | |
| "loss": 0.0036, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.200519273017775, | |
| "grad_norm": 0.26282957196235657, | |
| "learning_rate": 2.856531660011409e-06, | |
| "loss": 0.003, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.2045136808468144, | |
| "grad_norm": 0.18589575588703156, | |
| "learning_rate": 2.842270393610953e-06, | |
| "loss": 0.0025, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.208508088675854, | |
| "grad_norm": 0.20616132020950317, | |
| "learning_rate": 2.8280091272104966e-06, | |
| "loss": 0.0033, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 2.212502496504893, | |
| "grad_norm": 1.494046688079834, | |
| "learning_rate": 2.81374786081004e-06, | |
| "loss": 0.0031, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 2.2164969043339324, | |
| "grad_norm": 0.4652380347251892, | |
| "learning_rate": 2.799486594409584e-06, | |
| "loss": 0.003, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 2.220491312162972, | |
| "grad_norm": 0.32536935806274414, | |
| "learning_rate": 2.785225328009127e-06, | |
| "loss": 0.0013, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 2.2244857199920114, | |
| "grad_norm": 0.3774299621582031, | |
| "learning_rate": 2.770964061608671e-06, | |
| "loss": 0.0037, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 2.2284801278210504, | |
| "grad_norm": 2.422078847885132, | |
| "learning_rate": 2.756702795208215e-06, | |
| "loss": 0.0029, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 2.23247453565009, | |
| "grad_norm": 0.6645544767379761, | |
| "learning_rate": 2.7424415288077582e-06, | |
| "loss": 0.0037, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 2.2364689434791294, | |
| "grad_norm": 0.18347947299480438, | |
| "learning_rate": 2.728180262407302e-06, | |
| "loss": 0.0022, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 2.2404633513081684, | |
| "grad_norm": 0.4138175845146179, | |
| "learning_rate": 2.7139189960068456e-06, | |
| "loss": 0.0033, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 2.244457759137208, | |
| "grad_norm": 0.11286269873380661, | |
| "learning_rate": 2.6996577296063892e-06, | |
| "loss": 0.0021, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 2.2484521669662474, | |
| "grad_norm": 0.28822946548461914, | |
| "learning_rate": 2.6853964632059333e-06, | |
| "loss": 0.0015, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 2.2524465747952864, | |
| "grad_norm": 1.101440191268921, | |
| "learning_rate": 2.6711351968054766e-06, | |
| "loss": 0.002, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 2.256440982624326, | |
| "grad_norm": 1.339613437652588, | |
| "learning_rate": 2.6568739304050202e-06, | |
| "loss": 0.0033, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 2.2604353904533654, | |
| "grad_norm": 1.2192366123199463, | |
| "learning_rate": 2.642612664004564e-06, | |
| "loss": 0.0035, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 2.2644297982824044, | |
| "grad_norm": 0.2782962918281555, | |
| "learning_rate": 2.6283513976041076e-06, | |
| "loss": 0.0021, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 2.268424206111444, | |
| "grad_norm": 0.1587899625301361, | |
| "learning_rate": 2.614090131203651e-06, | |
| "loss": 0.0045, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 2.2724186139404834, | |
| "grad_norm": 0.3273210823535919, | |
| "learning_rate": 2.599828864803195e-06, | |
| "loss": 0.0027, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 2.276413021769523, | |
| "grad_norm": 0.6556480526924133, | |
| "learning_rate": 2.5855675984027386e-06, | |
| "loss": 0.0025, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 2.280407429598562, | |
| "grad_norm": 0.45347774028778076, | |
| "learning_rate": 2.571306332002282e-06, | |
| "loss": 0.0022, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 2.2844018374276014, | |
| "grad_norm": 0.4094420373439789, | |
| "learning_rate": 2.557045065601826e-06, | |
| "loss": 0.0022, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 2.288396245256641, | |
| "grad_norm": 1.0537807941436768, | |
| "learning_rate": 2.542783799201369e-06, | |
| "loss": 0.0044, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 2.29239065308568, | |
| "grad_norm": 0.7758886814117432, | |
| "learning_rate": 2.528522532800913e-06, | |
| "loss": 0.0036, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 2.2963850609147194, | |
| "grad_norm": 0.3213239908218384, | |
| "learning_rate": 2.514261266400457e-06, | |
| "loss": 0.0032, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 2.300379468743759, | |
| "grad_norm": 1.8324217796325684, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.0026, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 2.304373876572798, | |
| "grad_norm": 0.737530529499054, | |
| "learning_rate": 2.485738733599544e-06, | |
| "loss": 0.0021, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 2.3083682844018374, | |
| "grad_norm": 0.8499754071235657, | |
| "learning_rate": 2.4714774671990875e-06, | |
| "loss": 0.0062, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 2.312362692230877, | |
| "grad_norm": 0.3625565469264984, | |
| "learning_rate": 2.4572162007986312e-06, | |
| "loss": 0.0035, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 2.316357100059916, | |
| "grad_norm": 0.44742417335510254, | |
| "learning_rate": 2.442954934398175e-06, | |
| "loss": 0.002, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 2.3203515078889554, | |
| "grad_norm": 0.12408458441495895, | |
| "learning_rate": 2.428693667997718e-06, | |
| "loss": 0.0027, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 2.324345915717995, | |
| "grad_norm": 0.10359267890453339, | |
| "learning_rate": 2.414432401597262e-06, | |
| "loss": 0.0017, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 2.3283403235470344, | |
| "grad_norm": 0.7235808968544006, | |
| "learning_rate": 2.400171135196806e-06, | |
| "loss": 0.0032, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.3323347313760734, | |
| "grad_norm": 0.9523211717605591, | |
| "learning_rate": 2.3859098687963496e-06, | |
| "loss": 0.0038, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.336329139205113, | |
| "grad_norm": 0.48734623193740845, | |
| "learning_rate": 2.371648602395893e-06, | |
| "loss": 0.0022, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 2.3403235470341524, | |
| "grad_norm": 0.20006322860717773, | |
| "learning_rate": 2.3573873359954365e-06, | |
| "loss": 0.0025, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 2.3443179548631914, | |
| "grad_norm": 0.5010049939155579, | |
| "learning_rate": 2.34312606959498e-06, | |
| "loss": 0.0023, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 2.348312362692231, | |
| "grad_norm": 0.34393346309661865, | |
| "learning_rate": 2.328864803194524e-06, | |
| "loss": 0.0022, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 2.3523067705212704, | |
| "grad_norm": 0.5311034321784973, | |
| "learning_rate": 2.3146035367940675e-06, | |
| "loss": 0.002, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 2.3563011783503094, | |
| "grad_norm": 0.7192005515098572, | |
| "learning_rate": 2.300342270393611e-06, | |
| "loss": 0.0035, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 2.360295586179349, | |
| "grad_norm": 0.768454909324646, | |
| "learning_rate": 2.286081003993155e-06, | |
| "loss": 0.0032, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 2.3642899940083884, | |
| "grad_norm": 0.2671791911125183, | |
| "learning_rate": 2.2718197375926985e-06, | |
| "loss": 0.0037, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 2.3682844018374274, | |
| "grad_norm": 1.2844589948654175, | |
| "learning_rate": 2.257558471192242e-06, | |
| "loss": 0.0023, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 2.372278809666467, | |
| "grad_norm": 0.8719237446784973, | |
| "learning_rate": 2.2432972047917854e-06, | |
| "loss": 0.0022, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 2.3762732174955064, | |
| "grad_norm": 2.245568037033081, | |
| "learning_rate": 2.229035938391329e-06, | |
| "loss": 0.0032, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 2.380267625324546, | |
| "grad_norm": 0.23163773119449615, | |
| "learning_rate": 2.214774671990873e-06, | |
| "loss": 0.0024, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 2.384262033153585, | |
| "grad_norm": 1.0570616722106934, | |
| "learning_rate": 2.2005134055904164e-06, | |
| "loss": 0.0027, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 2.3882564409826244, | |
| "grad_norm": 0.17971302568912506, | |
| "learning_rate": 2.18625213918996e-06, | |
| "loss": 0.0018, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 2.392250848811664, | |
| "grad_norm": 0.41978639364242554, | |
| "learning_rate": 2.171990872789504e-06, | |
| "loss": 0.0021, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "grad_norm": 0.2864331305027008, | |
| "learning_rate": 2.1577296063890475e-06, | |
| "loss": 0.0025, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "eval_loss": 0.009802933782339096, | |
| "eval_runtime": 7536.5103, | |
| "eval_samples_per_second": 2.657, | |
| "eval_steps_per_second": 0.332, | |
| "eval_wer": 0.7426161258464242, | |
| "step": 6000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 7512, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.539223607902208e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
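
For reference, below is a minimal sketch of how the `log_history` recorded above could be inspected once this state has been written out as a `trainer_state.json` file. The file path is an assumption; only the keys that actually appear in the record above (`log_history`, `step`, `eval_loss`, `eval_wer`) are relied on.

```python
# Minimal sketch: load a saved trainer_state.json and list its evaluation
# entries. Uses only the standard library; the path below is an assumption,
# point it at wherever this state file was saved.
import json

with open("trainer_state.json") as f:  # hypothetical location of this file
    state = json.load(f)

# Evaluation entries are the log_history items that carry "eval_wer";
# the regular per-step training entries carry "loss" instead.
eval_logs = [entry for entry in state["log_history"] if "eval_wer" in entry]

for entry in eval_logs:
    print(f"step {entry['step']}: "
          f"eval_loss={entry['eval_loss']:.4f}, eval_wer={entry['eval_wer']:.4f}")
```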