{
  "best_global_step": 6000,
  "best_metric": 0.5057546679788949,
  "best_model_checkpoint": "./SALAMA_NEW8/checkpoint-6000",
  "epoch": 3.0,
  "eval_steps": 2000,
  "global_step": 7512,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003994407829039345,
      "grad_norm": 3.46816349029541,
      "learning_rate": 1.8e-07,
      "loss": 0.0307,
      "step": 10
    },
    {
      "epoch": 0.00798881565807869,
      "grad_norm": 3.722654104232788,
      "learning_rate": 3.8e-07,
      "loss": 0.0243,
      "step": 20
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 2.703404664993286,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0376,
      "step": 30
    },
    {
      "epoch": 0.01597763131615738,
      "grad_norm": 2.4501798152923584,
      "learning_rate": 7.8e-07,
      "loss": 0.0264,
      "step": 40
    },
    {
      "epoch": 0.019972039145196723,
      "grad_norm": 2.6027143001556396,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0369,
      "step": 50
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 2.276334047317505,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0192,
      "step": 60
    },
    {
      "epoch": 0.027960854803275415,
      "grad_norm": 1.9809595346450806,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0227,
      "step": 70
    },
    {
      "epoch": 0.03195526263231476,
      "grad_norm": 3.635103702545166,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.022,
      "step": 80
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 2.506479263305664,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0241,
      "step": 90
    },
    {
      "epoch": 0.039944078290393446,
      "grad_norm": 1.891008734703064,
      "learning_rate": 1.98e-06,
      "loss": 0.0218,
      "step": 100
    },
    {
      "epoch": 0.043938486119432796,
      "grad_norm": 4.943603992462158,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0229,
      "step": 110
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 0.3047073781490326,
      "learning_rate": 2.38e-06,
      "loss": 0.0369,
      "step": 120
    },
    {
      "epoch": 0.05192730177751148,
      "grad_norm": 2.3492813110351562,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0251,
      "step": 130
    },
    {
      "epoch": 0.05592170960655083,
      "grad_norm": 1.2943974733352661,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0216,
      "step": 140
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 3.320284843444824,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0248,
      "step": 150
    },
    {
      "epoch": 0.06391052526462952,
      "grad_norm": 3.313554048538208,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.035,
      "step": 160
    },
    {
      "epoch": 0.06790493309366886,
      "grad_norm": 2.999142646789551,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.02,
      "step": 170
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 2.4451065063476562,
      "learning_rate": 3.58e-06,
      "loss": 0.0183,
      "step": 180
    },
    {
      "epoch": 0.07589374875174755,
      "grad_norm": 2.6344587802886963,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0232,
      "step": 190
    },
    {
      "epoch": 0.07988815658078689,
      "grad_norm": 1.9555765390396118,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.019,
      "step": 200
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 1.7121124267578125,
      "learning_rate": 4.18e-06,
      "loss": 0.0194,
      "step": 210
    },
    {
      "epoch": 0.08787697223886559,
      "grad_norm": 3.0031440258026123,
      "learning_rate": 4.38e-06,
      "loss": 0.0385,
      "step": 220
    },
    {
      "epoch": 0.09187138006790493,
      "grad_norm": 2.69048810005188,
      "learning_rate": 4.58e-06,
      "loss": 0.0283,
      "step": 230
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 1.9045894145965576,
      "learning_rate": 4.78e-06,
      "loss": 0.04,
      "step": 240
    },
    {
      "epoch": 0.09986019572598362,
      "grad_norm": 2.2926864624023438,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0303,
      "step": 250
    },
    {
      "epoch": 0.10385460355502296,
      "grad_norm": 1.7242863178253174,
      "learning_rate": 5.18e-06,
      "loss": 0.0203,
      "step": 260
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 1.9205068349838257,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0332,
      "step": 270
    },
    {
      "epoch": 0.11184341921310166,
      "grad_norm": 2.80607008934021,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0281,
      "step": 280
    },
    {
      "epoch": 0.115837827042141,
      "grad_norm": 3.7204532623291016,
      "learning_rate": 5.78e-06,
      "loss": 0.0187,
      "step": 290
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 2.965312957763672,
      "learning_rate": 5.98e-06,
      "loss": 0.0329,
      "step": 300
    },
    {
      "epoch": 0.12382664270021969,
      "grad_norm": 4.226632118225098,
      "learning_rate": 6.18e-06,
      "loss": 0.0211,
      "step": 310
    },
    {
      "epoch": 0.12782105052925904,
      "grad_norm": 1.7873696088790894,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0183,
      "step": 320
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 2.310504674911499,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0199,
      "step": 330
    },
    {
      "epoch": 0.13580986618733773,
      "grad_norm": 2.269408702850342,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0258,
      "step": 340
    },
    {
      "epoch": 0.13980427401637707,
      "grad_norm": 2.605928421020508,
      "learning_rate": 6.98e-06,
      "loss": 0.0273,
      "step": 350
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 2.4324028491973877,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0183,
      "step": 360
    },
    {
      "epoch": 0.14779308967445576,
      "grad_norm": 1.8827005624771118,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0231,
      "step": 370
    },
    {
      "epoch": 0.1517874975034951,
      "grad_norm": 1.3971014022827148,
      "learning_rate": 7.58e-06,
      "loss": 0.0232,
      "step": 380
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 2.3092613220214844,
      "learning_rate": 7.78e-06,
      "loss": 0.0268,
      "step": 390
    },
    {
      "epoch": 0.15977631316157379,
      "grad_norm": 2.469114065170288,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0207,
      "step": 400
    },
    {
      "epoch": 0.16377072099061313,
      "grad_norm": 3.1286327838897705,
      "learning_rate": 8.18e-06,
      "loss": 0.031,
      "step": 410
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 3.00813627243042,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0285,
      "step": 420
    },
    {
      "epoch": 0.17175953664869184,
      "grad_norm": 4.581786155700684,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0434,
      "step": 430
    },
    {
      "epoch": 0.17575394447773118,
      "grad_norm": 1.9707911014556885,
      "learning_rate": 8.78e-06,
      "loss": 0.0294,
      "step": 440
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 2.7084460258483887,
      "learning_rate": 8.98e-06,
      "loss": 0.0287,
      "step": 450
    },
    {
      "epoch": 0.18374276013580987,
      "grad_norm": 5.108406066894531,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.037,
      "step": 460
    },
    {
      "epoch": 0.1877371679648492,
      "grad_norm": 1.6236001253128052,
      "learning_rate": 9.38e-06,
      "loss": 0.0192,
      "step": 470
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 3.1745877265930176,
      "learning_rate": 9.58e-06,
      "loss": 0.0287,
      "step": 480
    },
    {
      "epoch": 0.1957259836229279,
      "grad_norm": 3.5089573860168457,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0226,
      "step": 490
    },
    {
      "epoch": 0.19972039145196724,
      "grad_norm": 1.9109275341033936,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0365,
      "step": 500
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 3.1366279125213623,
      "learning_rate": 9.98716486023959e-06,
      "loss": 0.0333,
      "step": 510
    },
    {
      "epoch": 0.20770920711004592,
      "grad_norm": 2.936997175216675,
      "learning_rate": 9.972903593839133e-06,
      "loss": 0.0265,
      "step": 520
    },
    {
      "epoch": 0.2117036149390853,
      "grad_norm": 2.2245354652404785,
      "learning_rate": 9.958642327438678e-06,
      "loss": 0.0246,
      "step": 530
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 2.7430694103240967,
      "learning_rate": 9.944381061038221e-06,
      "loss": 0.0302,
      "step": 540
    },
    {
      "epoch": 0.21969243059716398,
      "grad_norm": 2.499619483947754,
      "learning_rate": 9.930119794637765e-06,
      "loss": 0.0407,
      "step": 550
    },
    {
      "epoch": 0.22368683842620332,
      "grad_norm": 3.148715019226074,
      "learning_rate": 9.91585852823731e-06,
      "loss": 0.0254,
      "step": 560
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 2.967156171798706,
      "learning_rate": 9.901597261836851e-06,
      "loss": 0.0274,
      "step": 570
    },
    {
      "epoch": 0.231675654084282,
      "grad_norm": 2.920797824859619,
      "learning_rate": 9.887335995436396e-06,
      "loss": 0.0339,
      "step": 580
    },
    {
      "epoch": 0.23567006191332135,
      "grad_norm": 2.7116758823394775,
      "learning_rate": 9.87307472903594e-06,
      "loss": 0.0339,
      "step": 590
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 2.9919016361236572,
      "learning_rate": 9.858813462635483e-06,
      "loss": 0.034,
      "step": 600
    },
    {
      "epoch": 0.24365887757140003,
      "grad_norm": 2.4491732120513916,
      "learning_rate": 9.844552196235026e-06,
      "loss": 0.0313,
      "step": 610
    },
    {
      "epoch": 0.24765328540043938,
      "grad_norm": 2.839397668838501,
      "learning_rate": 9.83029092983457e-06,
      "loss": 0.0336,
      "step": 620
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 3.5191802978515625,
      "learning_rate": 9.816029663434114e-06,
      "loss": 0.0326,
      "step": 630
    },
    {
      "epoch": 0.2556421010585181,
      "grad_norm": 3.609706163406372,
      "learning_rate": 9.801768397033657e-06,
      "loss": 0.0282,
      "step": 640
    },
    {
      "epoch": 0.2596365088875574,
      "grad_norm": 2.35837459564209,
      "learning_rate": 9.787507130633202e-06,
      "loss": 0.0247,
      "step": 650
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 2.5145423412323,
      "learning_rate": 9.773245864232744e-06,
      "loss": 0.0298,
      "step": 660
    },
    {
      "epoch": 0.2676253245456361,
      "grad_norm": 4.194463729858398,
      "learning_rate": 9.758984597832289e-06,
      "loss": 0.0336,
      "step": 670
    },
    {
      "epoch": 0.27161973237467546,
      "grad_norm": 3.227799892425537,
      "learning_rate": 9.744723331431832e-06,
      "loss": 0.028,
      "step": 680
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 2.315329074859619,
      "learning_rate": 9.730462065031375e-06,
      "loss": 0.0353,
      "step": 690
    },
    {
      "epoch": 0.27960854803275415,
      "grad_norm": 3.3833742141723633,
      "learning_rate": 9.71620079863092e-06,
      "loss": 0.0263,
      "step": 700
    },
    {
      "epoch": 0.2836029558617935,
      "grad_norm": 2.724463701248169,
      "learning_rate": 9.701939532230463e-06,
      "loss": 0.0292,
      "step": 710
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 3.384976863861084,
      "learning_rate": 9.687678265830007e-06,
      "loss": 0.0355,
      "step": 720
    },
    {
      "epoch": 0.2915917715198722,
      "grad_norm": 2.455484390258789,
      "learning_rate": 9.67341699942955e-06,
      "loss": 0.0344,
      "step": 730
    },
    {
      "epoch": 0.2955861793489115,
      "grad_norm": 2.9067740440368652,
      "learning_rate": 9.659155733029095e-06,
      "loss": 0.0372,
      "step": 740
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 3.116934061050415,
      "learning_rate": 9.644894466628636e-06,
      "loss": 0.0291,
      "step": 750
    },
    {
      "epoch": 0.3035749950069902,
      "grad_norm": 3.9219629764556885,
      "learning_rate": 9.630633200228181e-06,
      "loss": 0.0296,
      "step": 760
    },
    {
      "epoch": 0.30756940283602957,
      "grad_norm": 2.3769092559814453,
      "learning_rate": 9.616371933827725e-06,
      "loss": 0.0364,
      "step": 770
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 2.7386128902435303,
      "learning_rate": 9.602110667427268e-06,
      "loss": 0.0368,
      "step": 780
    },
    {
      "epoch": 0.31555821849410826,
      "grad_norm": 2.2549479007720947,
      "learning_rate": 9.587849401026813e-06,
      "loss": 0.042,
      "step": 790
    },
    {
      "epoch": 0.31955262632314757,
      "grad_norm": 2.8821749687194824,
      "learning_rate": 9.573588134626356e-06,
      "loss": 0.0291,
      "step": 800
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 2.2567620277404785,
      "learning_rate": 9.5593268682259e-06,
      "loss": 0.0287,
      "step": 810
    },
    {
      "epoch": 0.32754144198122626,
      "grad_norm": 2.1578598022460938,
      "learning_rate": 9.545065601825442e-06,
      "loss": 0.0387,
      "step": 820
    },
    {
      "epoch": 0.3315358498102656,
      "grad_norm": 3.6625397205352783,
      "learning_rate": 9.530804335424987e-06,
      "loss": 0.0288,
      "step": 830
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 2.7808022499084473,
      "learning_rate": 9.516543069024529e-06,
      "loss": 0.0376,
      "step": 840
    },
    {
      "epoch": 0.3395246654683443,
      "grad_norm": 3.6855411529541016,
      "learning_rate": 9.502281802624074e-06,
      "loss": 0.0372,
      "step": 850
    },
    {
      "epoch": 0.3435190732973837,
      "grad_norm": 2.3804266452789307,
      "learning_rate": 9.488020536223617e-06,
      "loss": 0.0262,
      "step": 860
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 2.1207637786865234,
      "learning_rate": 9.47375926982316e-06,
      "loss": 0.0249,
      "step": 870
    },
    {
      "epoch": 0.35150788895546237,
      "grad_norm": 2.7773971557617188,
      "learning_rate": 9.459498003422705e-06,
      "loss": 0.0329,
      "step": 880
    },
    {
      "epoch": 0.3555022967845017,
      "grad_norm": 1.848315954208374,
      "learning_rate": 9.445236737022249e-06,
      "loss": 0.0334,
      "step": 890
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 2.8464772701263428,
      "learning_rate": 9.430975470621792e-06,
      "loss": 0.0276,
      "step": 900
    },
    {
      "epoch": 0.36349111244258037,
      "grad_norm": 2.721813678741455,
      "learning_rate": 9.416714204221335e-06,
      "loss": 0.0302,
      "step": 910
    },
    {
      "epoch": 0.36748552027161974,
      "grad_norm": 2.085888147354126,
      "learning_rate": 9.40245293782088e-06,
      "loss": 0.0271,
      "step": 920
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 3.2898266315460205,
      "learning_rate": 9.388191671420423e-06,
      "loss": 0.0393,
      "step": 930
    },
    {
      "epoch": 0.3754743359296984,
      "grad_norm": 2.3367676734924316,
      "learning_rate": 9.373930405019966e-06,
      "loss": 0.0327,
      "step": 940
    },
    {
      "epoch": 0.3794687437587378,
      "grad_norm": 2.8437299728393555,
      "learning_rate": 9.35966913861951e-06,
      "loss": 0.0242,
      "step": 950
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 3.629429817199707,
      "learning_rate": 9.345407872219053e-06,
      "loss": 0.0317,
      "step": 960
    },
    {
      "epoch": 0.3874575594168165,
      "grad_norm": 1.7099779844284058,
      "learning_rate": 9.331146605818598e-06,
      "loss": 0.0366,
      "step": 970
    },
    {
      "epoch": 0.3914519672458558,
      "grad_norm": 3.043698310852051,
      "learning_rate": 9.316885339418141e-06,
      "loss": 0.0351,
      "step": 980
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 4.258053302764893,
      "learning_rate": 9.302624073017684e-06,
      "loss": 0.0381,
      "step": 990
    },
    {
      "epoch": 0.3994407829039345,
      "grad_norm": 1.7449215650558472,
      "learning_rate": 9.288362806617228e-06,
      "loss": 0.0434,
      "step": 1000
    },
    {
      "epoch": 0.40343519073297385,
      "grad_norm": 2.3904478549957275,
      "learning_rate": 9.274101540216773e-06,
      "loss": 0.0356,
      "step": 1010
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 4.740054130554199,
      "learning_rate": 9.259840273816316e-06,
      "loss": 0.0353,
      "step": 1020
    },
    {
      "epoch": 0.41142400639105253,
      "grad_norm": 3.9382550716400146,
      "learning_rate": 9.245579007415859e-06,
      "loss": 0.0452,
      "step": 1030
    },
    {
      "epoch": 0.41541841422009185,
      "grad_norm": 3.4149060249328613,
      "learning_rate": 9.231317741015402e-06,
      "loss": 0.0312,
      "step": 1040
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 1.7139674425125122,
      "learning_rate": 9.217056474614946e-06,
      "loss": 0.0277,
      "step": 1050
    },
    {
      "epoch": 0.4234072298781706,
      "grad_norm": 2.9577815532684326,
      "learning_rate": 9.20279520821449e-06,
      "loss": 0.0266,
      "step": 1060
    },
    {
      "epoch": 0.4274016377072099,
      "grad_norm": 2.4664392471313477,
      "learning_rate": 9.188533941814034e-06,
      "loss": 0.0237,
      "step": 1070
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 2.966825246810913,
      "learning_rate": 9.174272675413579e-06,
      "loss": 0.0193,
      "step": 1080
    },
    {
      "epoch": 0.4353904533652886,
      "grad_norm": 2.5782456398010254,
      "learning_rate": 9.16001140901312e-06,
      "loss": 0.0334,
      "step": 1090
    },
    {
      "epoch": 0.43938486119432796,
      "grad_norm": 3.482473134994507,
      "learning_rate": 9.145750142612665e-06,
      "loss": 0.0458,
      "step": 1100
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 2.6434431076049805,
      "learning_rate": 9.131488876212208e-06,
      "loss": 0.0385,
      "step": 1110
    },
    {
      "epoch": 0.44737367685240664,
      "grad_norm": 3.8204150199890137,
      "learning_rate": 9.117227609811752e-06,
      "loss": 0.0275,
      "step": 1120
    },
    {
      "epoch": 0.45136808468144596,
      "grad_norm": 2.0705342292785645,
      "learning_rate": 9.102966343411297e-06,
      "loss": 0.0332,
      "step": 1130
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 4.166182994842529,
      "learning_rate": 9.08870507701084e-06,
      "loss": 0.0301,
      "step": 1140
    },
    {
      "epoch": 0.45935690033952464,
      "grad_norm": 3.771206855773926,
      "learning_rate": 9.074443810610383e-06,
      "loss": 0.0285,
      "step": 1150
    },
    {
      "epoch": 0.463351308168564,
      "grad_norm": 3.3919098377227783,
      "learning_rate": 9.060182544209926e-06,
      "loss": 0.0333,
      "step": 1160
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 3.0169448852539062,
      "learning_rate": 9.045921277809471e-06,
      "loss": 0.0376,
      "step": 1170
    },
    {
      "epoch": 0.4713401238266427,
      "grad_norm": 3.0113446712493896,
      "learning_rate": 9.031660011409013e-06,
      "loss": 0.0367,
      "step": 1180
    },
    {
      "epoch": 0.47533453165568207,
      "grad_norm": 3.8403584957122803,
      "learning_rate": 9.017398745008558e-06,
      "loss": 0.0403,
      "step": 1190
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 1.8323266506195068,
      "learning_rate": 9.003137478608101e-06,
      "loss": 0.0254,
      "step": 1200
    },
    {
      "epoch": 0.48332334731376075,
      "grad_norm": 3.447676420211792,
      "learning_rate": 8.988876212207644e-06,
      "loss": 0.0309,
      "step": 1210
    },
    {
      "epoch": 0.48731775514280007,
      "grad_norm": 3.089911937713623,
      "learning_rate": 8.97461494580719e-06,
      "loss": 0.0368,
      "step": 1220
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 2.221872091293335,
      "learning_rate": 8.960353679406733e-06,
      "loss": 0.0225,
      "step": 1230
    },
    {
      "epoch": 0.49530657080087875,
      "grad_norm": 1.7740514278411865,
      "learning_rate": 8.946092413006276e-06,
      "loss": 0.03,
      "step": 1240
    },
    {
      "epoch": 0.4993009786299181,
      "grad_norm": 2.922903299331665,
      "learning_rate": 8.931831146605819e-06,
      "loss": 0.0234,
      "step": 1250
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 2.8122918605804443,
      "learning_rate": 8.917569880205364e-06,
      "loss": 0.0453,
      "step": 1260
    },
    {
      "epoch": 0.5072897942879968,
      "grad_norm": 2.1735727787017822,
      "learning_rate": 8.903308613804906e-06,
      "loss": 0.0295,
      "step": 1270
    },
    {
      "epoch": 0.5112842021170362,
      "grad_norm": 2.4211249351501465,
      "learning_rate": 8.88904734740445e-06,
      "loss": 0.0292,
      "step": 1280
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 5.627583026885986,
      "learning_rate": 8.874786081003994e-06,
      "loss": 0.0438,
      "step": 1290
    },
    {
      "epoch": 0.5192730177751148,
      "grad_norm": 2.746601104736328,
      "learning_rate": 8.860524814603537e-06,
      "loss": 0.0361,
      "step": 1300
    },
    {
      "epoch": 0.5232674256041542,
      "grad_norm": 3.283212661743164,
      "learning_rate": 8.846263548203082e-06,
      "loss": 0.0389,
      "step": 1310
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 2.8431429862976074,
      "learning_rate": 8.832002281802625e-06,
      "loss": 0.0344,
      "step": 1320
    },
    {
      "epoch": 0.5312562412622329,
      "grad_norm": 1.620734453201294,
      "learning_rate": 8.817741015402168e-06,
      "loss": 0.0281,
      "step": 1330
    },
    {
      "epoch": 0.5352506490912722,
      "grad_norm": 3.2256009578704834,
      "learning_rate": 8.803479749001712e-06,
      "loss": 0.0351,
      "step": 1340
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 3.3469367027282715,
      "learning_rate": 8.789218482601257e-06,
      "loss": 0.0334,
      "step": 1350
    },
    {
      "epoch": 0.5432394647493509,
      "grad_norm": 4.458150386810303,
      "learning_rate": 8.7749572162008e-06,
      "loss": 0.0226,
      "step": 1360
    },
    {
      "epoch": 0.5472338725783903,
      "grad_norm": 3.770509719848633,
      "learning_rate": 8.760695949800343e-06,
      "loss": 0.027,
      "step": 1370
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 1.770533561706543,
      "learning_rate": 8.746434683399886e-06,
      "loss": 0.0387,
      "step": 1380
    },
    {
      "epoch": 0.5552226882364689,
      "grad_norm": 3.1090400218963623,
      "learning_rate": 8.73217341699943e-06,
      "loss": 0.0335,
      "step": 1390
    },
    {
      "epoch": 0.5592170960655083,
      "grad_norm": 3.3928542137145996,
      "learning_rate": 8.717912150598975e-06,
      "loss": 0.0405,
      "step": 1400
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 2.3857524394989014,
      "learning_rate": 8.703650884198518e-06,
      "loss": 0.0356,
      "step": 1410
    },
    {
      "epoch": 0.567205911723587,
      "grad_norm": 2.3248329162597656,
      "learning_rate": 8.689389617798061e-06,
      "loss": 0.0366,
      "step": 1420
    },
    {
      "epoch": 0.5712003195526263,
      "grad_norm": 2.1319854259490967,
      "learning_rate": 8.675128351397604e-06,
      "loss": 0.036,
      "step": 1430
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 1.5602308511734009,
      "learning_rate": 8.66086708499715e-06,
      "loss": 0.0286,
      "step": 1440
    },
    {
      "epoch": 0.579189135210705,
      "grad_norm": 2.8112518787384033,
      "learning_rate": 8.646605818596692e-06,
      "loss": 0.0409,
      "step": 1450
    },
    {
      "epoch": 0.5831835430397444,
      "grad_norm": 3.130460739135742,
      "learning_rate": 8.632344552196236e-06,
      "loss": 0.0295,
      "step": 1460
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 4.073850154876709,
      "learning_rate": 8.618083285795779e-06,
      "loss": 0.0433,
      "step": 1470
    },
    {
      "epoch": 0.591172358697823,
      "grad_norm": 2.9691765308380127,
      "learning_rate": 8.603822019395322e-06,
      "loss": 0.0367,
      "step": 1480
    },
    {
      "epoch": 0.5951667665268624,
      "grad_norm": 2.8310015201568604,
      "learning_rate": 8.589560752994867e-06,
      "loss": 0.0286,
      "step": 1490
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 4.259763717651367,
      "learning_rate": 8.57529948659441e-06,
      "loss": 0.0374,
      "step": 1500
    },
    {
      "epoch": 0.603155582184941,
      "grad_norm": 4.179434299468994,
      "learning_rate": 8.561038220193954e-06,
      "loss": 0.0363,
      "step": 1510
    },
    {
      "epoch": 0.6071499900139804,
      "grad_norm": 1.6247011423110962,
      "learning_rate": 8.546776953793497e-06,
      "loss": 0.0225,
      "step": 1520
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 2.520911693572998,
      "learning_rate": 8.532515687393042e-06,
      "loss": 0.0331,
      "step": 1530
    },
    {
      "epoch": 0.6151388056720591,
      "grad_norm": 2.6778786182403564,
      "learning_rate": 8.518254420992585e-06,
      "loss": 0.0335,
      "step": 1540
    },
    {
      "epoch": 0.6191332135010985,
      "grad_norm": 2.9722695350646973,
      "learning_rate": 8.503993154592128e-06,
      "loss": 0.0419,
      "step": 1550
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 2.7999324798583984,
      "learning_rate": 8.489731888191672e-06,
      "loss": 0.0323,
      "step": 1560
    },
    {
      "epoch": 0.6271220291591771,
      "grad_norm": 2.474074363708496,
      "learning_rate": 8.475470621791215e-06,
      "loss": 0.0371,
      "step": 1570
    },
    {
      "epoch": 0.6311164369882165,
      "grad_norm": 2.244399309158325,
      "learning_rate": 8.46120935539076e-06,
      "loss": 0.0247,
      "step": 1580
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 2.866180896759033,
      "learning_rate": 8.446948088990303e-06,
      "loss": 0.039,
      "step": 1590
    },
    {
      "epoch": 0.6391052526462951,
      "grad_norm": 2.729217529296875,
      "learning_rate": 8.432686822589846e-06,
      "loss": 0.0378,
      "step": 1600
    },
    {
      "epoch": 0.6430996604753345,
      "grad_norm": 2.707044839859009,
      "learning_rate": 8.41842555618939e-06,
      "loss": 0.0246,
      "step": 1610
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 4.565586566925049,
      "learning_rate": 8.404164289788934e-06,
      "loss": 0.0452,
      "step": 1620
    },
    {
      "epoch": 0.6510884761334133,
      "grad_norm": 2.372103214263916,
      "learning_rate": 8.389903023388478e-06,
      "loss": 0.0343,
      "step": 1630
    },
    {
      "epoch": 0.6550828839624525,
      "grad_norm": 3.3515093326568604,
      "learning_rate": 8.375641756988021e-06,
      "loss": 0.0341,
      "step": 1640
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 2.545055866241455,
      "learning_rate": 8.361380490587566e-06,
      "loss": 0.028,
      "step": 1650
    },
    {
      "epoch": 0.6630716996205313,
      "grad_norm": 3.0382556915283203,
      "learning_rate": 8.347119224187107e-06,
      "loss": 0.0283,
      "step": 1660
    },
    {
      "epoch": 0.6670661074495706,
      "grad_norm": 3.1847259998321533,
      "learning_rate": 8.332857957786652e-06,
      "loss": 0.0304,
      "step": 1670
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 4.07405424118042,
      "learning_rate": 8.318596691386196e-06,
      "loss": 0.0321,
      "step": 1680
    },
    {
      "epoch": 0.6750549231076493,
      "grad_norm": 2.5733461380004883,
      "learning_rate": 8.304335424985739e-06,
      "loss": 0.0335,
      "step": 1690
    },
    {
      "epoch": 0.6790493309366886,
      "grad_norm": 4.693726062774658,
      "learning_rate": 8.290074158585282e-06,
      "loss": 0.0345,
      "step": 1700
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 3.111382007598877,
      "learning_rate": 8.275812892184827e-06,
      "loss": 0.0423,
      "step": 1710
    },
    {
      "epoch": 0.6870381465947674,
      "grad_norm": 3.0038416385650635,
      "learning_rate": 8.26155162578437e-06,
      "loss": 0.0272,
      "step": 1720
    },
    {
      "epoch": 0.6910325544238066,
      "grad_norm": 2.7561848163604736,
      "learning_rate": 8.247290359383914e-06,
      "loss": 0.0325,
      "step": 1730
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 2.5607120990753174,
      "learning_rate": 8.233029092983458e-06,
      "loss": 0.027,
      "step": 1740
    },
    {
      "epoch": 0.6990213700818854,
      "grad_norm": 1.522266149520874,
      "learning_rate": 8.218767826583002e-06,
      "loss": 0.0391,
      "step": 1750
    },
    {
      "epoch": 0.7030157779109247,
      "grad_norm": 3.8156557083129883,
      "learning_rate": 8.204506560182545e-06,
      "loss": 0.0354,
      "step": 1760
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 2.6884236335754395,
      "learning_rate": 8.190245293782088e-06,
      "loss": 0.037,
      "step": 1770
    },
    {
      "epoch": 0.7110045935690034,
      "grad_norm": 2.2173361778259277,
      "learning_rate": 8.175984027381633e-06,
      "loss": 0.0394,
      "step": 1780
    },
    {
      "epoch": 0.7149990013980427,
      "grad_norm": 1.7859878540039062,
      "learning_rate": 8.161722760981175e-06,
      "loss": 0.0295,
      "step": 1790
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 3.347618341445923,
      "learning_rate": 8.14746149458072e-06,
      "loss": 0.0332,
      "step": 1800
    },
    {
      "epoch": 0.7229878170561215,
      "grad_norm": 3.5312180519104004,
      "learning_rate": 8.133200228180263e-06,
      "loss": 0.0331,
      "step": 1810
    },
    {
      "epoch": 0.7269822248851607,
      "grad_norm": 3.1601924896240234,
      "learning_rate": 8.118938961779806e-06,
      "loss": 0.0273,
      "step": 1820
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 2.3679819107055664,
      "learning_rate": 8.104677695379351e-06,
      "loss": 0.0306,
      "step": 1830
    },
    {
      "epoch": 0.7349710405432395,
      "grad_norm": 3.013176441192627,
      "learning_rate": 8.090416428978894e-06,
      "loss": 0.0477,
      "step": 1840
    },
    {
      "epoch": 0.7389654483722788,
      "grad_norm": 2.3330905437469482,
      "learning_rate": 8.076155162578438e-06,
      "loss": 0.0295,
      "step": 1850
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 3.5097014904022217,
      "learning_rate": 8.06189389617798e-06,
      "loss": 0.0367,
      "step": 1860
    },
    {
      "epoch": 0.7469542640303575,
      "grad_norm": 2.2575650215148926,
      "learning_rate": 8.047632629777526e-06,
      "loss": 0.0315,
      "step": 1870
    },
    {
      "epoch": 0.7509486718593968,
      "grad_norm": 2.637472152709961,
      "learning_rate": 8.033371363377069e-06,
      "loss": 0.0428,
      "step": 1880
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 2.459928274154663,
      "learning_rate": 8.019110096976612e-06,
      "loss": 0.025,
      "step": 1890
    },
    {
      "epoch": 0.7589374875174756,
      "grad_norm": 2.7636241912841797,
      "learning_rate": 8.004848830576156e-06,
      "loss": 0.0259,
      "step": 1900
    },
    {
      "epoch": 0.7629318953465148,
      "grad_norm": 3.4332215785980225,
      "learning_rate": 7.990587564175699e-06,
      "loss": 0.0287,
      "step": 1910
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 3.5681216716766357,
      "learning_rate": 7.976326297775244e-06,
      "loss": 0.0314,
      "step": 1920
    },
    {
      "epoch": 0.7709207110045936,
      "grad_norm": 4.455057144165039,
      "learning_rate": 7.962065031374787e-06,
      "loss": 0.0381,
      "step": 1930
    },
    {
      "epoch": 0.774915118833633,
      "grad_norm": 3.433385133743286,
      "learning_rate": 7.94780376497433e-06,
      "loss": 0.035,
      "step": 1940
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 2.730055809020996,
      "learning_rate": 7.933542498573873e-06,
      "loss": 0.0269,
      "step": 1950
    },
    {
      "epoch": 0.7829039344917116,
      "grad_norm": 2.8840601444244385,
      "learning_rate": 7.919281232173418e-06,
      "loss": 0.0399,
      "step": 1960
    },
    {
      "epoch": 0.786898342320751,
      "grad_norm": 3.8547487258911133,
      "learning_rate": 7.905019965772962e-06,
      "loss": 0.0322,
      "step": 1970
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 2.3967716693878174,
      "learning_rate": 7.890758699372505e-06,
      "loss": 0.0272,
      "step": 1980
    },
    {
      "epoch": 0.7948871579788296,
      "grad_norm": 3.3462061882019043,
      "learning_rate": 7.876497432972048e-06,
      "loss": 0.0284,
      "step": 1990
    },
    {
      "epoch": 0.798881565807869,
      "grad_norm": 2.8046774864196777,
      "learning_rate": 7.862236166571591e-06,
      "loss": 0.0358,
      "step": 2000
    },
    {
      "epoch": 0.798881565807869,
      "eval_loss": 0.01868388056755066,
      "eval_runtime": 7338.2585,
      "eval_samples_per_second": 2.729,
      "eval_steps_per_second": 0.341,
      "eval_wer": 1.722299680144345,
      "step": 2000
    },
    {
      "epoch": 0.8028759736369083,
      "grad_norm": 2.368256092071533,
      "learning_rate": 7.847974900171136e-06,
      "loss": 0.0278,
      "step": 2010
    },
    {
      "epoch": 0.8068703814659477,
      "grad_norm": 2.8456475734710693,
      "learning_rate": 7.83371363377068e-06,
      "loss": 0.0339,
      "step": 2020
    },
    {
      "epoch": 0.8108647892949871,
      "grad_norm": 1.9898338317871094,
      "learning_rate": 7.819452367370223e-06,
      "loss": 0.0289,
      "step": 2030
    },
    {
      "epoch": 0.8148591971240263,
      "grad_norm": 3.1095328330993652,
      "learning_rate": 7.805191100969766e-06,
      "loss": 0.0291,
      "step": 2040
    },
    {
      "epoch": 0.8188536049530657,
      "grad_norm": 2.8899197578430176,
      "learning_rate": 7.790929834569311e-06,
      "loss": 0.0375,
      "step": 2050
    },
    {
      "epoch": 0.8228480127821051,
      "grad_norm": 3.9213523864746094,
      "learning_rate": 7.776668568168854e-06,
      "loss": 0.0444,
      "step": 2060
    },
    {
      "epoch": 0.8268424206111444,
      "grad_norm": 2.5879340171813965,
      "learning_rate": 7.762407301768397e-06,
      "loss": 0.0282,
      "step": 2070
    },
    {
      "epoch": 0.8308368284401837,
      "grad_norm": 1.8168540000915527,
      "learning_rate": 7.748146035367942e-06,
      "loss": 0.025,
      "step": 2080
    },
    {
      "epoch": 0.8348312362692231,
      "grad_norm": 3.3461949825286865,
      "learning_rate": 7.733884768967484e-06,
      "loss": 0.0273,
      "step": 2090
    },
    {
      "epoch": 0.8388256440982624,
      "grad_norm": 2.239011764526367,
      "learning_rate": 7.719623502567029e-06,
      "loss": 0.0315,
      "step": 2100
    },
    {
      "epoch": 0.8428200519273018,
      "grad_norm": 1.676645278930664,
      "learning_rate": 7.705362236166572e-06,
      "loss": 0.036,
      "step": 2110
    },
    {
      "epoch": 0.8468144597563412,
      "grad_norm": 3.113101005554199,
      "learning_rate": 7.691100969766115e-06,
      "loss": 0.0332,
      "step": 2120
    },
    {
      "epoch": 0.8508088675853804,
      "grad_norm": 3.9707107543945312,
      "learning_rate": 7.676839703365659e-06,
      "loss": 0.0247,
      "step": 2130
    },
    {
      "epoch": 0.8548032754144198,
      "grad_norm": 2.901353120803833,
      "learning_rate": 7.662578436965204e-06,
      "loss": 0.0304,
      "step": 2140
    },
    {
      "epoch": 0.8587976832434592,
      "grad_norm": 2.0267584323883057,
      "learning_rate": 7.648317170564747e-06,
      "loss": 0.0256,
      "step": 2150
    },
    {
      "epoch": 0.8627920910724985,
      "grad_norm": 2.433629274368286,
      "learning_rate": 7.63405590416429e-06,
      "loss": 0.0267,
      "step": 2160
    },
    {
      "epoch": 0.8667864989015378,
      "grad_norm": 2.8645429611206055,
      "learning_rate": 7.619794637763834e-06,
      "loss": 0.0263,
      "step": 2170
    },
    {
      "epoch": 0.8707809067305772,
      "grad_norm": 2.485597610473633,
      "learning_rate": 7.6055333713633774e-06,
      "loss": 0.0285,
      "step": 2180
    },
    {
      "epoch": 0.8747753145596165,
      "grad_norm": 2.311027765274048,
      "learning_rate": 7.5912721049629215e-06,
      "loss": 0.0237,
      "step": 2190
    },
    {
      "epoch": 0.8787697223886559,
      "grad_norm": 2.102123260498047,
      "learning_rate": 7.577010838562466e-06,
      "loss": 0.0251,
      "step": 2200
    },
    {
      "epoch": 0.8827641302176952,
      "grad_norm": 1.7563374042510986,
      "learning_rate": 7.562749572162008e-06,
      "loss": 0.0331,
      "step": 2210
    },
    {
      "epoch": 0.8867585380467345,
      "grad_norm": 4.571224689483643,
      "learning_rate": 7.548488305761552e-06,
      "loss": 0.0411,
      "step": 2220
    },
    {
      "epoch": 0.8907529458757739,
      "grad_norm": 2.5844295024871826,
      "learning_rate": 7.534227039361096e-06,
      "loss": 0.0233,
      "step": 2230
    },
    {
      "epoch": 0.8947473537048133,
      "grad_norm": 2.9091975688934326,
      "learning_rate": 7.519965772960639e-06,
      "loss": 0.019,
      "step": 2240
    },
    {
      "epoch": 0.8987417615338527,
      "grad_norm": 3.633951425552368,
      "learning_rate": 7.505704506560183e-06,
      "loss": 0.0322,
      "step": 2250
    },
    {
      "epoch": 0.9027361693628919,
      "grad_norm": 2.073758602142334,
      "learning_rate": 7.491443240159727e-06,
      "loss": 0.0213,
      "step": 2260
    },
    {
      "epoch": 0.9067305771919313,
      "grad_norm": 1.943109154701233,
      "learning_rate": 7.47718197375927e-06,
      "loss": 0.0309,
      "step": 2270
    },
    {
      "epoch": 0.9107249850209707,
      "grad_norm": 2.816091775894165,
      "learning_rate": 7.462920707358814e-06,
      "loss": 0.0342,
      "step": 2280
    },
    {
      "epoch": 0.91471939285001,
      "grad_norm": 2.2993264198303223,
      "learning_rate": 7.448659440958358e-06,
      "loss": 0.0284,
      "step": 2290
    },
    {
      "epoch": 0.9187138006790493,
      "grad_norm": 2.5072145462036133,
      "learning_rate": 7.434398174557901e-06,
      "loss": 0.0261,
      "step": 2300
    },
    {
      "epoch": 0.9227082085080887,
      "grad_norm": 2.1535396575927734,
      "learning_rate": 7.420136908157445e-06,
      "loss": 0.0318,
      "step": 2310
    },
    {
      "epoch": 0.926702616337128,
      "grad_norm": 3.5091567039489746,
      "learning_rate": 7.405875641756989e-06,
      "loss": 0.0286,
      "step": 2320
    },
    {
      "epoch": 0.9306970241661674,
      "grad_norm": 3.785184144973755,
      "learning_rate": 7.391614375356533e-06,
      "loss": 0.0409,
      "step": 2330
    },
    {
      "epoch": 0.9346914319952067,
      "grad_norm": 3.4833099842071533,
      "learning_rate": 7.377353108956075e-06,
      "loss": 0.0334,
      "step": 2340
    },
    {
      "epoch": 0.938685839824246,
      "grad_norm": 2.384570598602295,
      "learning_rate": 7.3630918425556194e-06,
      "loss": 0.0311,
      "step": 2350
    },
    {
      "epoch": 0.9426802476532854,
      "grad_norm": 3.203986406326294,
      "learning_rate": 7.3488305761551635e-06,
      "loss": 0.0269,
      "step": 2360
    },
    {
      "epoch": 0.9466746554823248,
      "grad_norm": 2.0542373657226562,
      "learning_rate": 7.334569309754707e-06,
      "loss": 0.027,
      "step": 2370
    },
    {
      "epoch": 0.9506690633113641,
      "grad_norm": 4.261587142944336,
      "learning_rate": 7.320308043354251e-06,
      "loss": 0.0264,
      "step": 2380
    },
    {
      "epoch": 0.9546634711404034,
      "grad_norm": 3.3129560947418213,
      "learning_rate": 7.306046776953795e-06,
      "loss": 0.037,
      "step": 2390
    },
    {
      "epoch": 0.9586578789694428,
      "grad_norm": 2.2495546340942383,
      "learning_rate": 7.291785510553337e-06,
      "loss": 0.034,
      "step": 2400
    },
    {
      "epoch": 0.9626522867984821,
      "grad_norm": 3.4507882595062256,
      "learning_rate": 7.2775242441528815e-06,
      "loss": 0.0258,
      "step": 2410
    },
    {
      "epoch": 0.9666466946275215,
      "grad_norm": 3.949570417404175,
      "learning_rate": 7.2632629777524256e-06,
      "loss": 0.0315,
      "step": 2420
    },
    {
      "epoch": 0.9706411024565608,
      "grad_norm": 1.7982138395309448,
      "learning_rate": 7.249001711351969e-06,
      "loss": 0.0259,
      "step": 2430
    },
    {
      "epoch": 0.9746355102856001,
      "grad_norm": 2.3690760135650635,
      "learning_rate": 7.234740444951512e-06,
      "loss": 0.0351,
      "step": 2440
    },
    {
      "epoch": 0.9786299181146395,
      "grad_norm": 3.11702823638916,
      "learning_rate": 7.220479178551056e-06,
      "loss": 0.0258,
      "step": 2450
    },
    {
      "epoch": 0.9826243259436789,
      "grad_norm": 2.2427687644958496,
      "learning_rate": 7.206217912150599e-06,
      "loss": 0.0328,
      "step": 2460
    },
    {
      "epoch": 0.9866187337727182,
      "grad_norm": 1.585270881652832,
      "learning_rate": 7.1919566457501435e-06,
      "loss": 0.0337,
      "step": 2470
    },
    {
      "epoch": 0.9906131416017575,
      "grad_norm": 3.1885695457458496,
      "learning_rate": 7.1776953793496876e-06,
      "loss": 0.0299,
      "step": 2480
    },
    {
      "epoch": 0.9946075494307969,
      "grad_norm": 3.44587779045105,
      "learning_rate": 7.16343411294923e-06,
      "loss": 0.0293,
      "step": 2490
    },
    {
      "epoch": 0.9986019572598362,
      "grad_norm": 2.4740006923675537,
      "learning_rate": 7.149172846548774e-06,
      "loss": 0.0317,
      "step": 2500
    },
    {
      "epoch": 1.0023966446974235,
      "grad_norm": 1.7924280166625977,
      "learning_rate": 7.134911580148318e-06,
      "loss": 0.0203,
      "step": 2510
    },
    {
      "epoch": 1.006391052526463,
      "grad_norm": 0.6403665542602539,
      "learning_rate": 7.120650313747861e-06,
      "loss": 0.0077,
      "step": 2520
    },
    {
      "epoch": 1.0103854603555023,
      "grad_norm": 1.2908625602722168,
      "learning_rate": 7.1063890473474055e-06,
      "loss": 0.0099,
      "step": 2530
    },
    {
      "epoch": 1.0143798681845417,
      "grad_norm": 0.9998202323913574,
      "learning_rate": 7.092127780946949e-06,
      "loss": 0.0079,
      "step": 2540
    },
    {
      "epoch": 1.018374276013581,
      "grad_norm": 0.6649907827377319,
      "learning_rate": 7.077866514546492e-06,
      "loss": 0.0098,
      "step": 2550
    },
    {
      "epoch": 1.0223686838426203,
      "grad_norm": 1.456899642944336,
      "learning_rate": 7.063605248146036e-06,
      "loss": 0.0095,
      "step": 2560
    },
    {
      "epoch": 1.0263630916716597,
      "grad_norm": 1.0050690174102783,
      "learning_rate": 7.04934398174558e-06,
      "loss": 0.0082,
      "step": 2570
    },
    {
      "epoch": 1.030357499500699,
      "grad_norm": 1.0253567695617676,
      "learning_rate": 7.035082715345123e-06,
      "loss": 0.011,
      "step": 2580
    },
    {
      "epoch": 1.0343519073297385,
      "grad_norm": 1.203191876411438,
      "learning_rate": 7.020821448944667e-06,
      "loss": 0.0086,
      "step": 2590
    },
    {
      "epoch": 1.0383463151587777,
      "grad_norm": 1.5093960762023926,
      "learning_rate": 7.006560182544211e-06,
      "loss": 0.0102,
      "step": 2600
    },
    {
      "epoch": 1.042340722987817,
      "grad_norm": 1.8297464847564697,
      "learning_rate": 6.992298916143754e-06,
      "loss": 0.0098,
      "step": 2610
    },
    {
      "epoch": 1.0463351308168565,
      "grad_norm": 1.1694895029067993,
      "learning_rate": 6.978037649743298e-06,
      "loss": 0.0084,
      "step": 2620
    },
    {
      "epoch": 1.0503295386458957,
      "grad_norm": 1.4883347749710083,
      "learning_rate": 6.963776383342841e-06,
      "loss": 0.0069,
      "step": 2630
    },
    {
      "epoch": 1.054323946474935,
      "grad_norm": 1.6328678131103516,
      "learning_rate": 6.949515116942385e-06,
      "loss": 0.0072,
      "step": 2640
    },
    {
      "epoch": 1.0583183543039745,
      "grad_norm": 1.665246605873108,
      "learning_rate": 6.935253850541929e-06,
      "loss": 0.0184,
      "step": 2650
    },
    {
      "epoch": 1.0623127621330137,
      "grad_norm": 0.8198590874671936,
      "learning_rate": 6.920992584141473e-06,
      "loss": 0.0075,
      "step": 2660
    },
    {
      "epoch": 1.0663071699620532,
      "grad_norm": 0.7312160730361938,
      "learning_rate": 6.906731317741015e-06,
      "loss": 0.0106,
      "step": 2670
    },
    {
      "epoch": 1.0703015777910925,
      "grad_norm": 1.2935587167739868,
      "learning_rate": 6.892470051340559e-06,
      "loss": 0.0121,
      "step": 2680
    },
    {
      "epoch": 1.0742959856201317,
      "grad_norm": 0.6566735506057739,
      "learning_rate": 6.878208784940103e-06,
      "loss": 0.0071,
      "step": 2690
    },
    {
      "epoch": 1.0782903934491712,
      "grad_norm": 1.318433403968811,
      "learning_rate": 6.863947518539647e-06,
      "loss": 0.0099,
      "step": 2700
    },
    {
      "epoch": 1.0822848012782105,
      "grad_norm": 2.248878240585327,
      "learning_rate": 6.849686252139191e-06,
      "loss": 0.0133,
      "step": 2710
    },
    {
      "epoch": 1.08627920910725,
      "grad_norm": 0.9404581785202026,
      "learning_rate": 6.835424985738735e-06,
      "loss": 0.0087,
      "step": 2720
    },
    {
      "epoch": 1.0902736169362892,
      "grad_norm": 1.51760733127594,
      "learning_rate": 6.821163719338277e-06,
      "loss": 0.0089,
      "step": 2730
    },
    {
      "epoch": 1.0942680247653285,
      "grad_norm": 1.5794305801391602,
      "learning_rate": 6.806902452937821e-06,
      "loss": 0.0096,
      "step": 2740
    },
    {
      "epoch": 1.098262432594368,
      "grad_norm": 1.9199167490005493,
      "learning_rate": 6.792641186537365e-06,
      "loss": 0.0091,
      "step": 2750
    },
    {
      "epoch": 1.1022568404234072,
      "grad_norm": 1.6599887609481812,
      "learning_rate": 6.778379920136909e-06,
      "loss": 0.0106,
      "step": 2760
    },
    {
      "epoch": 1.1062512482524465,
      "grad_norm": 0.8282005190849304,
      "learning_rate": 6.764118653736452e-06,
      "loss": 0.0118,
      "step": 2770
    },
    {
      "epoch": 1.110245656081486,
      "grad_norm": 2.2222962379455566,
      "learning_rate": 6.749857387335996e-06,
      "loss": 0.0075,
      "step": 2780
    },
    {
      "epoch": 1.1142400639105252,
      "grad_norm": 0.39625316858291626,
      "learning_rate": 6.735596120935539e-06,
      "loss": 0.011,
      "step": 2790
    },
    {
      "epoch": 1.1182344717395647,
      "grad_norm": 1.2594791650772095,
      "learning_rate": 6.721334854535083e-06,
      "loss": 0.0059,
      "step": 2800
    },
    {
      "epoch": 1.122228879568604,
      "grad_norm": 1.6027462482452393,
      "learning_rate": 6.7070735881346274e-06,
      "loss": 0.0098,
      "step": 2810
    },
    {
      "epoch": 1.1262232873976432,
      "grad_norm": 1.6769353151321411,
      "learning_rate": 6.69281232173417e-06,
      "loss": 0.008,
      "step": 2820
    },
    {
      "epoch": 1.1302176952266827,
      "grad_norm": 0.7512936592102051,
      "learning_rate": 6.678551055333714e-06,
      "loss": 0.0141,
      "step": 2830
    },
    {
      "epoch": 1.134212103055722,
      "grad_norm": 1.700231671333313,
      "learning_rate": 6.664289788933258e-06,
      "loss": 0.0121,
      "step": 2840
    },
    {
      "epoch": 1.1382065108847614,
      "grad_norm": 1.984163761138916,
      "learning_rate": 6.650028522532801e-06,
      "loss": 0.0133,
      "step": 2850
    },
    {
      "epoch": 1.1422009187138007,
      "grad_norm": 2.2872514724731445,
      "learning_rate": 6.635767256132345e-06,
      "loss": 0.0094,
      "step": 2860
    },
    {
      "epoch": 1.14619532654284,
      "grad_norm": 1.120602011680603,
      "learning_rate": 6.621505989731889e-06,
      "loss": 0.0076,
      "step": 2870
    },
    {
      "epoch": 1.1501897343718794,
      "grad_norm": 0.8228394389152527,
      "learning_rate": 6.607244723331432e-06,
      "loss": 0.0086,
      "step": 2880
    },
    {
      "epoch": 1.1541841422009187,
      "grad_norm": 1.6478750705718994,
      "learning_rate": 6.592983456930976e-06,
      "loss": 0.0145,
      "step": 2890
    },
    {
      "epoch": 1.158178550029958,
      "grad_norm": 2.5362820625305176,
      "learning_rate": 6.57872219053052e-06,
      "loss": 0.0097,
      "step": 2900
    },
    {
      "epoch": 1.1621729578589974,
      "grad_norm": 1.6985121965408325,
      "learning_rate": 6.5644609241300625e-06,
      "loss": 0.0114,
      "step": 2910
    },
    {
      "epoch": 1.1661673656880367,
      "grad_norm": 0.9929540753364563,
      "learning_rate": 6.5501996577296066e-06,
      "loss": 0.0107,
      "step": 2920
    },
    {
      "epoch": 1.1701617735170762,
      "grad_norm": 1.1043400764465332,
      "learning_rate": 6.535938391329151e-06,
      "loss": 0.0119,
      "step": 2930
    },
    {
      "epoch": 1.1741561813461154,
      "grad_norm": 2.4200518131256104,
      "learning_rate": 6.521677124928694e-06,
      "loss": 0.0111,
      "step": 2940
    },
    {
      "epoch": 1.1781505891751547,
      "grad_norm": 1.107531189918518,
      "learning_rate": 6.507415858528238e-06,
      "loss": 0.009,
      "step": 2950
    },
    {
      "epoch": 1.1821449970041942,
      "grad_norm": 1.9044643640518188,
      "learning_rate": 6.493154592127781e-06,
      "loss": 0.007,
      "step": 2960
    },
    {
      "epoch": 1.1861394048332334,
      "grad_norm": 1.1096996068954468,
      "learning_rate": 6.478893325727325e-06,
      "loss": 0.0076,
      "step": 2970
    },
    {
      "epoch": 1.190133812662273,
      "grad_norm": 2.1665825843811035,
      "learning_rate": 6.464632059326869e-06,
      "loss": 0.0097,
      "step": 2980
    },
    {
      "epoch": 1.1941282204913122,
      "grad_norm": 1.3429949283599854,
      "learning_rate": 6.450370792926413e-06,
      "loss": 0.0069,
      "step": 2990
    },
    {
      "epoch": 1.1981226283203514,
      "grad_norm": 1.5667604207992554,
      "learning_rate": 6.436109526525957e-06,
      "loss": 0.0091,
      "step": 3000
    },
    {
      "epoch": 1.202117036149391,
      "grad_norm": 1.0686432123184204,
      "learning_rate": 6.421848260125499e-06,
      "loss": 0.0073,
      "step": 3010
    },
    {
      "epoch": 1.2061114439784302,
      "grad_norm": 1.4043235778808594,
      "learning_rate": 6.407586993725043e-06,
      "loss": 0.0089,
      "step": 3020
    },
    {
      "epoch": 1.2101058518074694,
      "grad_norm": 1.6781501770019531,
      "learning_rate": 6.393325727324587e-06,
      "loss": 0.0129,
      "step": 3030
    },
    {
      "epoch": 1.214100259636509,
      "grad_norm": 0.7282060384750366,
      "learning_rate": 6.379064460924131e-06,
      "loss": 0.0109,
      "step": 3040
    },
    {
      "epoch": 1.2180946674655482,
      "grad_norm": 1.3478143215179443,
      "learning_rate": 6.364803194523675e-06,
      "loss": 0.0096,
      "step": 3050
    },
    {
      "epoch": 1.2220890752945877,
      "grad_norm": 1.9877102375030518,
      "learning_rate": 6.350541928123218e-06,
      "loss": 0.0083,
      "step": 3060
    },
    {
      "epoch": 1.226083483123627,
      "grad_norm": 1.2017593383789062,
      "learning_rate": 6.336280661722761e-06,
      "loss": 0.0067,
      "step": 3070
    },
    {
      "epoch": 1.2300778909526662,
      "grad_norm": 0.6694231629371643,
      "learning_rate": 6.322019395322305e-06,
      "loss": 0.0074,
      "step": 3080
    },
    {
      "epoch": 1.2340722987817057,
      "grad_norm": 0.8456215262413025,
      "learning_rate": 6.307758128921849e-06,
      "loss": 0.0083,
      "step": 3090
    },
    {
      "epoch": 1.238066706610745,
      "grad_norm": 0.8469221591949463,
      "learning_rate": 6.293496862521392e-06,
      "loss": 0.0111,
      "step": 3100
    },
    {
      "epoch": 1.2420611144397844,
      "grad_norm": 2.0267868041992188,
      "learning_rate": 6.279235596120936e-06,
      "loss": 0.0088,
      "step": 3110
    },
    {
      "epoch": 1.2460555222688237,
      "grad_norm": 1.6765652894973755,
      "learning_rate": 6.26497432972048e-06,
      "loss": 0.0101,
      "step": 3120
    },
    {
      "epoch": 1.250049930097863,
      "grad_norm": 0.974431037902832,
      "learning_rate": 6.250713063320023e-06,
      "loss": 0.0118,
      "step": 3130
    },
    {
      "epoch": 1.2540443379269024,
      "grad_norm": 1.4480584859848022,
      "learning_rate": 6.236451796919567e-06,
      "loss": 0.0096,
      "step": 3140
    },
    {
      "epoch": 1.2580387457559417,
      "grad_norm": 0.9492830038070679,
      "learning_rate": 6.222190530519111e-06,
      "loss": 0.01,
      "step": 3150
    },
    {
      "epoch": 1.262033153584981,
      "grad_norm": 2.0500473976135254,
      "learning_rate": 6.207929264118654e-06,
      "loss": 0.0121,
      "step": 3160
    },
    {
      "epoch": 1.2660275614140204,
      "grad_norm": 2.319540500640869,
      "learning_rate": 6.193667997718198e-06,
      "loss": 0.013,
      "step": 3170
    },
    {
      "epoch": 1.2700219692430597,
      "grad_norm": 1.5995023250579834,
      "learning_rate": 6.179406731317742e-06,
      "loss": 0.014,
      "step": 3180
    },
    {
      "epoch": 1.2740163770720991,
      "grad_norm": 1.7551332712173462,
      "learning_rate": 6.165145464917285e-06,
      "loss": 0.0103,
      "step": 3190
    },
    {
      "epoch": 1.2780107849011384,
      "grad_norm": 2.8841307163238525,
      "learning_rate": 6.1508841985168285e-06,
      "loss": 0.0085,
      "step": 3200
    },
    {
      "epoch": 1.2820051927301779,
      "grad_norm": 1.5693269968032837,
      "learning_rate": 6.136622932116373e-06,
      "loss": 0.013,
      "step": 3210
    },
    {
      "epoch": 1.2859996005592171,
      "grad_norm": 1.059067726135254,
      "learning_rate": 6.122361665715916e-06,
      "loss": 0.0122,
      "step": 3220
    },
    {
      "epoch": 1.2899940083882564,
      "grad_norm": 1.7539544105529785,
      "learning_rate": 6.10810039931546e-06,
      "loss": 0.0092,
      "step": 3230
    },
    {
      "epoch": 1.2939884162172959,
      "grad_norm": 1.1549718379974365,
      "learning_rate": 6.093839132915004e-06,
      "loss": 0.0084,
      "step": 3240
    },
    {
      "epoch": 1.2979828240463351,
      "grad_norm": 1.7076776027679443,
      "learning_rate": 6.0795778665145464e-06,
      "loss": 0.0084,
      "step": 3250
    },
    {
      "epoch": 1.3019772318753744,
      "grad_norm": 0.9175742268562317,
      "learning_rate": 6.0653166001140905e-06,
      "loss": 0.0069,
      "step": 3260
    },
    {
      "epoch": 1.3059716397044139,
      "grad_norm": 1.440813660621643,
      "learning_rate": 6.051055333713635e-06,
      "loss": 0.0115,
      "step": 3270
    },
    {
      "epoch": 1.3099660475334531,
      "grad_norm": 0.5184957981109619,
      "learning_rate": 6.036794067313178e-06,
      "loss": 0.0072,
      "step": 3280
    },
    {
      "epoch": 1.3139604553624924,
      "grad_norm": 0.3342525362968445,
      "learning_rate": 6.022532800912721e-06,
      "loss": 0.014,
      "step": 3290
    },
    {
      "epoch": 1.3179548631915319,
      "grad_norm": 2.238618850708008,
      "learning_rate": 6.008271534512265e-06,
      "loss": 0.0082,
      "step": 3300
    },
    {
      "epoch": 1.3219492710205711,
      "grad_norm": 7.149013996124268,
      "learning_rate": 5.9940102681118085e-06,
      "loss": 0.0127,
      "step": 3310
    },
    {
      "epoch": 1.3259436788496106,
      "grad_norm": 1.1425492763519287,
      "learning_rate": 5.9797490017113525e-06,
      "loss": 0.0115,
      "step": 3320
    },
    {
      "epoch": 1.3299380866786499,
      "grad_norm": 1.9267038106918335,
      "learning_rate": 5.965487735310897e-06,
      "loss": 0.0094,
      "step": 3330
    },
    {
      "epoch": 1.3339324945076894,
      "grad_norm": 0.8218601942062378,
      "learning_rate": 5.951226468910439e-06,
      "loss": 0.008,
      "step": 3340
    },
    {
      "epoch": 1.3379269023367286,
      "grad_norm": 1.620312213897705,
      "learning_rate": 5.936965202509983e-06,
      "loss": 0.0067,
      "step": 3350
    },
    {
      "epoch": 1.3419213101657679,
      "grad_norm": 0.7273368835449219,
      "learning_rate": 5.922703936109527e-06,
      "loss": 0.0096,
      "step": 3360
    },
    {
      "epoch": 1.3459157179948074,
      "grad_norm": 3.9472219944000244,
      "learning_rate": 5.9084426697090705e-06,
      "loss": 0.0102,
      "step": 3370
    },
    {
      "epoch": 1.3499101258238466,
      "grad_norm": 2.4310262203216553,
      "learning_rate": 5.8941814033086146e-06,
      "loss": 0.0154,
      "step": 3380
    },
    {
      "epoch": 1.3539045336528859,
      "grad_norm": 1.8209127187728882,
      "learning_rate": 5.879920136908158e-06,
      "loss": 0.0094,
      "step": 3390
    },
    {
      "epoch": 1.3578989414819254,
| "grad_norm": 1.022280216217041, | |
| "learning_rate": 5.865658870507701e-06, | |
| "loss": 0.0062, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.3618933493109646, | |
| "grad_norm": 1.8226642608642578, | |
| "learning_rate": 5.851397604107245e-06, | |
| "loss": 0.0113, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.3658877571400039, | |
| "grad_norm": 2.004735231399536, | |
| "learning_rate": 5.837136337706789e-06, | |
| "loss": 0.0121, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.3698821649690434, | |
| "grad_norm": 3.744194507598877, | |
| "learning_rate": 5.822875071306332e-06, | |
| "loss": 0.0096, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.3738765727980826, | |
| "grad_norm": 1.8464494943618774, | |
| "learning_rate": 5.808613804905876e-06, | |
| "loss": 0.009, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.377870980627122, | |
| "grad_norm": 1.8000208139419556, | |
| "learning_rate": 5.79435253850542e-06, | |
| "loss": 0.0112, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.3818653884561614, | |
| "grad_norm": 3.974322557449341, | |
| "learning_rate": 5.780091272104963e-06, | |
| "loss": 0.0114, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.3858597962852008, | |
| "grad_norm": 0.7045836448669434, | |
| "learning_rate": 5.765830005704507e-06, | |
| "loss": 0.0091, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.38985420411424, | |
| "grad_norm": 0.4988465905189514, | |
| "learning_rate": 5.751568739304051e-06, | |
| "loss": 0.0069, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.3938486119432794, | |
| "grad_norm": 0.8017894625663757, | |
| "learning_rate": 5.737307472903594e-06, | |
| "loss": 0.0096, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.3978430197723188, | |
| "grad_norm": 1.61961030960083, | |
| "learning_rate": 5.723046206503138e-06, | |
| "loss": 0.0081, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.401837427601358, | |
| "grad_norm": 2.5354208946228027, | |
| "learning_rate": 5.708784940102682e-06, | |
| "loss": 0.0091, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.4058318354303974, | |
| "grad_norm": 0.8456679582595825, | |
| "learning_rate": 5.694523673702225e-06, | |
| "loss": 0.012, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.4098262432594368, | |
| "grad_norm": 2.605341911315918, | |
| "learning_rate": 5.680262407301768e-06, | |
| "loss": 0.0088, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.413820651088476, | |
| "grad_norm": 1.4951987266540527, | |
| "learning_rate": 5.6660011409013125e-06, | |
| "loss": 0.013, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.4178150589175154, | |
| "grad_norm": 2.0663983821868896, | |
| "learning_rate": 5.651739874500856e-06, | |
| "loss": 0.0096, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.4218094667465548, | |
| "grad_norm": 0.9899235963821411, | |
| "learning_rate": 5.6374786081004e-06, | |
| "loss": 0.0079, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.425803874575594, | |
| "grad_norm": 0.7591453194618225, | |
| "learning_rate": 5.623217341699944e-06, | |
| "loss": 0.0067, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.4297982824046336, | |
| "grad_norm": 1.2285091876983643, | |
| "learning_rate": 5.608956075299488e-06, | |
| "loss": 0.0062, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.4337926902336728, | |
| "grad_norm": 1.8616957664489746, | |
| "learning_rate": 5.59469480889903e-06, | |
| "loss": 0.0092, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.4377870980627123, | |
| "grad_norm": 1.0690120458602905, | |
| "learning_rate": 5.5804335424985745e-06, | |
| "loss": 0.0066, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.4417815058917516, | |
| "grad_norm": 1.20885169506073, | |
| "learning_rate": 5.566172276098119e-06, | |
| "loss": 0.0107, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.4457759137207908, | |
| "grad_norm": 1.5971421003341675, | |
| "learning_rate": 5.551911009697661e-06, | |
| "loss": 0.0079, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.4497703215498303, | |
| "grad_norm": 1.570513129234314, | |
| "learning_rate": 5.537649743297205e-06, | |
| "loss": 0.0087, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.4537647293788696, | |
| "grad_norm": 0.9042888879776001, | |
| "learning_rate": 5.523388476896749e-06, | |
| "loss": 0.0127, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.4577591372079088, | |
| "grad_norm": 1.7177059650421143, | |
| "learning_rate": 5.509127210496292e-06, | |
| "loss": 0.008, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.4617535450369483, | |
| "grad_norm": 1.1265398263931274, | |
| "learning_rate": 5.4948659440958365e-06, | |
| "loss": 0.0066, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.4657479528659876, | |
| "grad_norm": 2.9831197261810303, | |
| "learning_rate": 5.480604677695381e-06, | |
| "loss": 0.0072, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.4697423606950268, | |
| "grad_norm": 1.4246543645858765, | |
| "learning_rate": 5.466343411294923e-06, | |
| "loss": 0.0074, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.4737367685240663, | |
| "grad_norm": 1.62272310256958, | |
| "learning_rate": 5.452082144894467e-06, | |
| "loss": 0.0077, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.4777311763531056, | |
| "grad_norm": 1.5177655220031738, | |
| "learning_rate": 5.437820878494011e-06, | |
| "loss": 0.0121, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.481725584182145, | |
| "grad_norm": 2.8331351280212402, | |
| "learning_rate": 5.4235596120935544e-06, | |
| "loss": 0.0061, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.4857199920111843, | |
| "grad_norm": 1.5248603820800781, | |
| "learning_rate": 5.409298345693098e-06, | |
| "loss": 0.0109, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.4897143998402238, | |
| "grad_norm": 1.143097996711731, | |
| "learning_rate": 5.395037079292642e-06, | |
| "loss": 0.01, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.493708807669263, | |
| "grad_norm": 1.9222826957702637, | |
| "learning_rate": 5.380775812892185e-06, | |
| "loss": 0.0076, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.4977032154983023, | |
| "grad_norm": 1.4354894161224365, | |
| "learning_rate": 5.366514546491729e-06, | |
| "loss": 0.0101, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.5016976233273418, | |
| "grad_norm": 0.6621835231781006, | |
| "learning_rate": 5.352253280091273e-06, | |
| "loss": 0.0098, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.505692031156381, | |
| "grad_norm": 1.5087827444076538, | |
| "learning_rate": 5.337992013690816e-06, | |
| "loss": 0.0114, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.5096864389854203, | |
| "grad_norm": 1.2942672967910767, | |
| "learning_rate": 5.32373074729036e-06, | |
| "loss": 0.0069, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.5136808468144598, | |
| "grad_norm": 1.9707920551300049, | |
| "learning_rate": 5.309469480889904e-06, | |
| "loss": 0.0098, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.517675254643499, | |
| "grad_norm": 0.8530262112617493, | |
| "learning_rate": 5.295208214489447e-06, | |
| "loss": 0.0083, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.5216696624725383, | |
| "grad_norm": 1.9188873767852783, | |
| "learning_rate": 5.280946948088991e-06, | |
| "loss": 0.0081, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.5256640703015778, | |
| "grad_norm": 1.7006468772888184, | |
| "learning_rate": 5.266685681688534e-06, | |
| "loss": 0.0136, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.5296584781306173, | |
| "grad_norm": 1.3003968000411987, | |
| "learning_rate": 5.252424415288078e-06, | |
| "loss": 0.0071, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.5336528859596563, | |
| "grad_norm": 1.1423379182815552, | |
| "learning_rate": 5.238163148887622e-06, | |
| "loss": 0.0095, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.5376472937886958, | |
| "grad_norm": 0.8597344756126404, | |
| "learning_rate": 5.223901882487166e-06, | |
| "loss": 0.0084, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.5416417016177353, | |
| "grad_norm": 1.0262315273284912, | |
| "learning_rate": 5.209640616086708e-06, | |
| "loss": 0.0104, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.5456361094467745, | |
| "grad_norm": 1.3405098915100098, | |
| "learning_rate": 5.195379349686252e-06, | |
| "loss": 0.0172, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.5496305172758138, | |
| "grad_norm": 0.7442639470100403, | |
| "learning_rate": 5.1811180832857964e-06, | |
| "loss": 0.0077, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.5536249251048533, | |
| "grad_norm": 0.856499433517456, | |
| "learning_rate": 5.16685681688534e-06, | |
| "loss": 0.0089, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.5576193329338925, | |
| "grad_norm": 2.6278645992279053, | |
| "learning_rate": 5.152595550484884e-06, | |
| "loss": 0.0095, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.5616137407629318, | |
| "grad_norm": 1.86880624294281, | |
| "learning_rate": 5.138334284084428e-06, | |
| "loss": 0.0124, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.5656081485919713, | |
| "grad_norm": 0.583057165145874, | |
| "learning_rate": 5.12407301768397e-06, | |
| "loss": 0.0062, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.5696025564210105, | |
| "grad_norm": 1.2156856060028076, | |
| "learning_rate": 5.109811751283514e-06, | |
| "loss": 0.0081, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.5735969642500498, | |
| "grad_norm": 0.9274953603744507, | |
| "learning_rate": 5.0955504848830584e-06, | |
| "loss": 0.01, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.5775913720790893, | |
| "grad_norm": 1.0414108037948608, | |
| "learning_rate": 5.081289218482601e-06, | |
| "loss": 0.0088, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.5815857799081288, | |
| "grad_norm": 1.3919917345046997, | |
| "learning_rate": 5.067027952082145e-06, | |
| "loss": 0.0082, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.5855801877371678, | |
| "grad_norm": 0.8071143627166748, | |
| "learning_rate": 5.052766685681689e-06, | |
| "loss": 0.0073, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.5895745955662073, | |
| "grad_norm": 1.5692224502563477, | |
| "learning_rate": 5.038505419281232e-06, | |
| "loss": 0.0074, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.5935690033952468, | |
| "grad_norm": 1.0880498886108398, | |
| "learning_rate": 5.024244152880776e-06, | |
| "loss": 0.0127, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "grad_norm": 1.7401431798934937, | |
| "learning_rate": 5.0099828864803205e-06, | |
| "loss": 0.015, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.597563411224286, | |
| "eval_loss": 0.01016959734261036, | |
| "eval_runtime": 7385.3342, | |
| "eval_samples_per_second": 2.711, | |
| "eval_steps_per_second": 0.339, | |
| "eval_wer": 0.8737253615462425, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.6015578190533253, | |
| "grad_norm": 0.680465817451477, | |
| "learning_rate": 4.995721620079864e-06, | |
| "loss": 0.009, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.6055522268823648, | |
| "grad_norm": 0.3950757086277008, | |
| "learning_rate": 4.981460353679407e-06, | |
| "loss": 0.0091, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.609546634711404, | |
| "grad_norm": 2.6939499378204346, | |
| "learning_rate": 4.96719908727895e-06, | |
| "loss": 0.0106, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.6135410425404433, | |
| "grad_norm": 1.401220679283142, | |
| "learning_rate": 4.952937820878494e-06, | |
| "loss": 0.0112, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.6175354503694828, | |
| "grad_norm": 1.7981312274932861, | |
| "learning_rate": 4.9386765544780376e-06, | |
| "loss": 0.0061, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.621529858198522, | |
| "grad_norm": 2.05911922454834, | |
| "learning_rate": 4.924415288077582e-06, | |
| "loss": 0.0088, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.6255242660275613, | |
| "grad_norm": 0.5581210851669312, | |
| "learning_rate": 4.910154021677126e-06, | |
| "loss": 0.0095, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.6295186738566008, | |
| "grad_norm": 1.671968936920166, | |
| "learning_rate": 4.895892755276669e-06, | |
| "loss": 0.0102, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.6335130816856402, | |
| "grad_norm": 1.0555320978164673, | |
| "learning_rate": 4.881631488876213e-06, | |
| "loss": 0.012, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.6375074895146795, | |
| "grad_norm": 0.5234736204147339, | |
| "learning_rate": 4.867370222475756e-06, | |
| "loss": 0.0092, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.6415018973437188, | |
| "grad_norm": 1.0109747648239136, | |
| "learning_rate": 4.8531089560753e-06, | |
| "loss": 0.0086, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.6454963051727582, | |
| "grad_norm": 2.0059330463409424, | |
| "learning_rate": 4.838847689674844e-06, | |
| "loss": 0.0119, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.6494907130017975, | |
| "grad_norm": 1.5553911924362183, | |
| "learning_rate": 4.824586423274387e-06, | |
| "loss": 0.0083, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.6534851208308368, | |
| "grad_norm": 1.573845624923706, | |
| "learning_rate": 4.810325156873931e-06, | |
| "loss": 0.0093, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.6574795286598762, | |
| "grad_norm": 0.5471251606941223, | |
| "learning_rate": 4.796063890473474e-06, | |
| "loss": 0.0067, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.6614739364889155, | |
| "grad_norm": 2.054495096206665, | |
| "learning_rate": 4.781802624073018e-06, | |
| "loss": 0.016, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.6654683443179548, | |
| "grad_norm": 1.1892727613449097, | |
| "learning_rate": 4.767541357672562e-06, | |
| "loss": 0.0089, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.6694627521469942, | |
| "grad_norm": 0.7050298452377319, | |
| "learning_rate": 4.753280091272106e-06, | |
| "loss": 0.0068, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.6734571599760335, | |
| "grad_norm": 1.208492398262024, | |
| "learning_rate": 4.739018824871649e-06, | |
| "loss": 0.0064, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.6774515678050728, | |
| "grad_norm": 0.864264965057373, | |
| "learning_rate": 4.724757558471192e-06, | |
| "loss": 0.0084, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.6814459756341122, | |
| "grad_norm": 1.3503026962280273, | |
| "learning_rate": 4.710496292070736e-06, | |
| "loss": 0.0082, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.6854403834631517, | |
| "grad_norm": 1.3305470943450928, | |
| "learning_rate": 4.6962350256702795e-06, | |
| "loss": 0.0108, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.689434791292191, | |
| "grad_norm": 1.7467732429504395, | |
| "learning_rate": 4.681973759269824e-06, | |
| "loss": 0.0081, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.6934291991212302, | |
| "grad_norm": 2.1052684783935547, | |
| "learning_rate": 4.667712492869368e-06, | |
| "loss": 0.0157, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.6974236069502697, | |
| "grad_norm": 1.0252904891967773, | |
| "learning_rate": 4.653451226468911e-06, | |
| "loss": 0.0066, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.701418014779309, | |
| "grad_norm": 1.6374844312667847, | |
| "learning_rate": 4.639189960068454e-06, | |
| "loss": 0.0096, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.7054124226083482, | |
| "grad_norm": 1.1286324262619019, | |
| "learning_rate": 4.624928693667998e-06, | |
| "loss": 0.0076, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.7094068304373877, | |
| "grad_norm": 2.2228775024414062, | |
| "learning_rate": 4.6106674272675416e-06, | |
| "loss": 0.0092, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.713401238266427, | |
| "grad_norm": 1.5603349208831787, | |
| "learning_rate": 4.596406160867085e-06, | |
| "loss": 0.0065, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.7173956460954662, | |
| "grad_norm": 1.7231379747390747, | |
| "learning_rate": 4.582144894466629e-06, | |
| "loss": 0.0079, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.7213900539245057, | |
| "grad_norm": 2.4461798667907715, | |
| "learning_rate": 4.567883628066173e-06, | |
| "loss": 0.0091, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.725384461753545, | |
| "grad_norm": 1.6841260194778442, | |
| "learning_rate": 4.553622361665716e-06, | |
| "loss": 0.0102, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.7293788695825842, | |
| "grad_norm": 1.3950663805007935, | |
| "learning_rate": 4.53936109526526e-06, | |
| "loss": 0.0063, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.7333732774116237, | |
| "grad_norm": 1.364123821258545, | |
| "learning_rate": 4.525099828864804e-06, | |
| "loss": 0.0088, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.7373676852406632, | |
| "grad_norm": 1.0013163089752197, | |
| "learning_rate": 4.510838562464347e-06, | |
| "loss": 0.0083, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.7413620930697025, | |
| "grad_norm": 3.0454695224761963, | |
| "learning_rate": 4.496577296063891e-06, | |
| "loss": 0.007, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.7453565008987417, | |
| "grad_norm": 1.0976226329803467, | |
| "learning_rate": 4.482316029663434e-06, | |
| "loss": 0.0065, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.7493509087277812, | |
| "grad_norm": 1.0366853475570679, | |
| "learning_rate": 4.468054763262978e-06, | |
| "loss": 0.0072, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.7533453165568205, | |
| "grad_norm": 1.7006181478500366, | |
| "learning_rate": 4.4537934968625215e-06, | |
| "loss": 0.0079, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.7573397243858597, | |
| "grad_norm": 2.483431100845337, | |
| "learning_rate": 4.439532230462066e-06, | |
| "loss": 0.0097, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.7613341322148992, | |
| "grad_norm": 0.44049394130706787, | |
| "learning_rate": 4.425270964061609e-06, | |
| "loss": 0.011, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.7653285400439385, | |
| "grad_norm": 1.3942147493362427, | |
| "learning_rate": 4.411009697661153e-06, | |
| "loss": 0.0076, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.7693229478729777, | |
| "grad_norm": 1.5767748355865479, | |
| "learning_rate": 4.396748431260696e-06, | |
| "loss": 0.0114, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.7733173557020172, | |
| "grad_norm": 2.5710384845733643, | |
| "learning_rate": 4.38248716486024e-06, | |
| "loss": 0.0123, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.7773117635310565, | |
| "grad_norm": 0.730352520942688, | |
| "learning_rate": 4.3682258984597835e-06, | |
| "loss": 0.0048, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.7813061713600957, | |
| "grad_norm": 0.7298102974891663, | |
| "learning_rate": 4.353964632059327e-06, | |
| "loss": 0.0135, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.7853005791891352, | |
| "grad_norm": 0.6274979114532471, | |
| "learning_rate": 4.339703365658871e-06, | |
| "loss": 0.0091, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.7892949870181747, | |
| "grad_norm": 3.2221016883850098, | |
| "learning_rate": 4.325442099258414e-06, | |
| "loss": 0.0136, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.793289394847214, | |
| "grad_norm": 0.959096372127533, | |
| "learning_rate": 4.311180832857958e-06, | |
| "loss": 0.0083, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.7972838026762532, | |
| "grad_norm": 1.216997504234314, | |
| "learning_rate": 4.296919566457502e-06, | |
| "loss": 0.0064, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.8012782105052927, | |
| "grad_norm": 1.628796935081482, | |
| "learning_rate": 4.2826583000570456e-06, | |
| "loss": 0.0082, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.805272618334332, | |
| "grad_norm": 0.5727189183235168, | |
| "learning_rate": 4.268397033656589e-06, | |
| "loss": 0.0102, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.8092670261633712, | |
| "grad_norm": 2.1575019359588623, | |
| "learning_rate": 4.254135767256133e-06, | |
| "loss": 0.0072, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.8132614339924107, | |
| "grad_norm": 1.3159669637680054, | |
| "learning_rate": 4.239874500855676e-06, | |
| "loss": 0.0115, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.81725584182145, | |
| "grad_norm": 1.2897024154663086, | |
| "learning_rate": 4.225613234455219e-06, | |
| "loss": 0.007, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.8212502496504892, | |
| "grad_norm": 1.1860707998275757, | |
| "learning_rate": 4.2113519680547635e-06, | |
| "loss": 0.0071, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.8252446574795287, | |
| "grad_norm": 1.6072814464569092, | |
| "learning_rate": 4.197090701654308e-06, | |
| "loss": 0.0072, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.8292390653085682, | |
| "grad_norm": 0.8610338568687439, | |
| "learning_rate": 4.182829435253851e-06, | |
| "loss": 0.0079, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.8332334731376072, | |
| "grad_norm": 1.0036977529525757, | |
| "learning_rate": 4.168568168853395e-06, | |
| "loss": 0.0053, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.8372278809666467, | |
| "grad_norm": 0.8917168974876404, | |
| "learning_rate": 4.154306902452938e-06, | |
| "loss": 0.0065, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.8412222887956862, | |
| "grad_norm": 2.3281478881835938, | |
| "learning_rate": 4.1400456360524814e-06, | |
| "loss": 0.0067, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.8452166966247254, | |
| "grad_norm": 1.1472433805465698, | |
| "learning_rate": 4.1257843696520255e-06, | |
| "loss": 0.0116, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.8492111044537647, | |
| "grad_norm": 1.0347813367843628, | |
| "learning_rate": 4.111523103251569e-06, | |
| "loss": 0.0057, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.8532055122828042, | |
| "grad_norm": 0.8118449449539185, | |
| "learning_rate": 4.097261836851113e-06, | |
| "loss": 0.0067, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.8571999201118434, | |
| "grad_norm": 0.8567641973495483, | |
| "learning_rate": 4.083000570450656e-06, | |
| "loss": 0.0099, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.8611943279408827, | |
| "grad_norm": 1.2339649200439453, | |
| "learning_rate": 4.0687393040502e-06, | |
| "loss": 0.0057, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.8651887357699222, | |
| "grad_norm": 3.1318016052246094, | |
| "learning_rate": 4.054478037649744e-06, | |
| "loss": 0.0083, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.8691831435989614, | |
| "grad_norm": 1.414455771446228, | |
| "learning_rate": 4.0402167712492876e-06, | |
| "loss": 0.0082, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.8731775514280007, | |
| "grad_norm": 1.44307279586792, | |
| "learning_rate": 4.025955504848831e-06, | |
| "loss": 0.0109, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.8771719592570402, | |
| "grad_norm": 1.7109726667404175, | |
| "learning_rate": 4.011694238448375e-06, | |
| "loss": 0.0083, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.8811663670860796, | |
| "grad_norm": 0.5837492942810059, | |
| "learning_rate": 3.997432972047918e-06, | |
| "loss": 0.0093, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.8851607749151187, | |
| "grad_norm": 1.2008476257324219, | |
| "learning_rate": 3.983171705647461e-06, | |
| "loss": 0.0084, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.8891551827441582, | |
| "grad_norm": 0.686278760433197, | |
| "learning_rate": 3.9689104392470055e-06, | |
| "loss": 0.0081, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.8931495905731976, | |
| "grad_norm": 1.5602636337280273, | |
| "learning_rate": 3.954649172846549e-06, | |
| "loss": 0.0063, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.897143998402237, | |
| "grad_norm": 2.410867214202881, | |
| "learning_rate": 3.940387906446093e-06, | |
| "loss": 0.0109, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.9011384062312762, | |
| "grad_norm": 1.423948049545288, | |
| "learning_rate": 3.926126640045637e-06, | |
| "loss": 0.0082, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.9051328140603156, | |
| "grad_norm": 1.2718310356140137, | |
| "learning_rate": 3.91186537364518e-06, | |
| "loss": 0.0093, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.909127221889355, | |
| "grad_norm": 1.6484652757644653, | |
| "learning_rate": 3.897604107244723e-06, | |
| "loss": 0.0053, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.9131216297183942, | |
| "grad_norm": 1.5819053649902344, | |
| "learning_rate": 3.8833428408442675e-06, | |
| "loss": 0.007, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.9171160375474336, | |
| "grad_norm": 0.7520307898521423, | |
| "learning_rate": 3.869081574443811e-06, | |
| "loss": 0.0077, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.921110445376473, | |
| "grad_norm": 1.954355001449585, | |
| "learning_rate": 3.854820308043354e-06, | |
| "loss": 0.006, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.9251048532055122, | |
| "grad_norm": 1.853141188621521, | |
| "learning_rate": 3.840559041642898e-06, | |
| "loss": 0.0061, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.9290992610345516, | |
| "grad_norm": 1.5008714199066162, | |
| "learning_rate": 3.826297775242442e-06, | |
| "loss": 0.0074, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.9330936688635911, | |
| "grad_norm": 1.2396436929702759, | |
| "learning_rate": 3.8120365088419854e-06, | |
| "loss": 0.0068, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.9370880766926302, | |
| "grad_norm": 1.0389924049377441, | |
| "learning_rate": 3.797775242441529e-06, | |
| "loss": 0.0103, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.9410824845216696, | |
| "grad_norm": 0.9938933253288269, | |
| "learning_rate": 3.7835139760410728e-06, | |
| "loss": 0.0092, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.9450768923507091, | |
| "grad_norm": 2.3726730346679688, | |
| "learning_rate": 3.769252709640616e-06, | |
| "loss": 0.0059, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.9490713001797484, | |
| "grad_norm": 1.4253814220428467, | |
| "learning_rate": 3.75499144324016e-06, | |
| "loss": 0.0071, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.9530657080087876, | |
| "grad_norm": 0.777284562587738, | |
| "learning_rate": 3.740730176839704e-06, | |
| "loss": 0.0083, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.9570601158378271, | |
| "grad_norm": 1.6175594329833984, | |
| "learning_rate": 3.726468910439247e-06, | |
| "loss": 0.0081, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.9610545236668664, | |
| "grad_norm": 1.2781018018722534, | |
| "learning_rate": 3.712207644038791e-06, | |
| "loss": 0.0078, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.9650489314959056, | |
| "grad_norm": 1.007778525352478, | |
| "learning_rate": 3.6979463776383344e-06, | |
| "loss": 0.0073, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.9690433393249451, | |
| "grad_norm": 2.0084667205810547, | |
| "learning_rate": 3.683685111237878e-06, | |
| "loss": 0.0089, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.9730377471539844, | |
| "grad_norm": 1.049784541130066, | |
| "learning_rate": 3.669423844837422e-06, | |
| "loss": 0.0064, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.9770321549830236, | |
| "grad_norm": 1.5071396827697754, | |
| "learning_rate": 3.6551625784369654e-06, | |
| "loss": 0.0059, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.9810265628120631, | |
| "grad_norm": 1.4314241409301758, | |
| "learning_rate": 3.640901312036509e-06, | |
| "loss": 0.0069, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.9850209706411026, | |
| "grad_norm": 1.3423244953155518, | |
| "learning_rate": 3.6266400456360527e-06, | |
| "loss": 0.0084, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.9890153784701416, | |
| "grad_norm": 0.7151647806167603, | |
| "learning_rate": 3.6123787792355964e-06, | |
| "loss": 0.009, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.9930097862991811, | |
| "grad_norm": 0.9428077936172485, | |
| "learning_rate": 3.5981175128351405e-06, | |
| "loss": 0.0047, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.9970041941282206, | |
| "grad_norm": 1.7378363609313965, | |
| "learning_rate": 3.5838562464346838e-06, | |
| "loss": 0.0073, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.000798881565808, | |
| "grad_norm": 0.26533937454223633, | |
| "learning_rate": 3.5695949800342274e-06, | |
| "loss": 0.004, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.004793289394847, | |
| "grad_norm": 0.6320909261703491, | |
| "learning_rate": 3.555333713633771e-06, | |
| "loss": 0.0028, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.0087876972238865, | |
| "grad_norm": 0.3733059763908386, | |
| "learning_rate": 3.5410724472333148e-06, | |
| "loss": 0.0024, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.012782105052926, | |
| "grad_norm": 0.3936101794242859, | |
| "learning_rate": 3.526811180832858e-06, | |
| "loss": 0.0015, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.0167765128819655, | |
| "grad_norm": 0.34495025873184204, | |
| "learning_rate": 3.512549914432402e-06, | |
| "loss": 0.0017, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.0207709207110045, | |
| "grad_norm": 0.19415637850761414, | |
| "learning_rate": 3.4982886480319454e-06, | |
| "loss": 0.0023, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.024765328540044, | |
| "grad_norm": 0.3872200548648834, | |
| "learning_rate": 3.484027381631489e-06, | |
| "loss": 0.0016, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.0287597363690835, | |
| "grad_norm": 0.3008989691734314, | |
| "learning_rate": 3.469766115231033e-06, | |
| "loss": 0.0013, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.0327541441981225, | |
| "grad_norm": 0.2125740796327591, | |
| "learning_rate": 3.4555048488305764e-06, | |
| "loss": 0.0017, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.036748552027162, | |
| "grad_norm": 0.5899261236190796, | |
| "learning_rate": 3.44124358243012e-06, | |
| "loss": 0.0021, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.0407429598562015, | |
| "grad_norm": 0.5872080326080322, | |
| "learning_rate": 3.4269823160296637e-06, | |
| "loss": 0.0021, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.0447373676852405, | |
| "grad_norm": 1.2929030656814575, | |
| "learning_rate": 3.4127210496292074e-06, | |
| "loss": 0.0028, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.04873177551428, | |
| "grad_norm": 0.15616489946842194, | |
| "learning_rate": 3.3984597832287506e-06, | |
| "loss": 0.0023, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.0527261833433195, | |
| "grad_norm": 0.25699567794799805, | |
| "learning_rate": 3.3841985168282947e-06, | |
| "loss": 0.0033, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.0567205911723585, | |
| "grad_norm": 1.5154634714126587, | |
| "learning_rate": 3.3699372504278384e-06, | |
| "loss": 0.0017, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.060714999001398, | |
| "grad_norm": 0.952170729637146, | |
| "learning_rate": 3.3556759840273816e-06, | |
| "loss": 0.0029, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.0647094068304375, | |
| "grad_norm": 0.32178279757499695, | |
| "learning_rate": 3.3414147176269257e-06, | |
| "loss": 0.0026, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.068703814659477, | |
| "grad_norm": 0.3404058516025543, | |
| "learning_rate": 3.327153451226469e-06, | |
| "loss": 0.0022, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.072698222488516, | |
| "grad_norm": 0.47805553674697876, | |
| "learning_rate": 3.3128921848260127e-06, | |
| "loss": 0.0044, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.0766926303175555, | |
| "grad_norm": 0.15118145942687988, | |
| "learning_rate": 3.2986309184255568e-06, | |
| "loss": 0.0015, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.080687038146595, | |
| "grad_norm": 1.853371262550354, | |
| "learning_rate": 3.2843696520251e-06, | |
| "loss": 0.0057, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.084681445975634, | |
| "grad_norm": 0.37700751423835754, | |
| "learning_rate": 3.2701083856246437e-06, | |
| "loss": 0.0051, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.0886758538046735, | |
| "grad_norm": 0.3602418601512909, | |
| "learning_rate": 3.2558471192241873e-06, | |
| "loss": 0.0019, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.092670261633713, | |
| "grad_norm": 0.20452773571014404, | |
| "learning_rate": 3.241585852823731e-06, | |
| "loss": 0.0028, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.096664669462752, | |
| "grad_norm": 0.23050379753112793, | |
| "learning_rate": 3.2273245864232743e-06, | |
| "loss": 0.0013, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.1006590772917915, | |
| "grad_norm": 1.7879595756530762, | |
| "learning_rate": 3.2130633200228184e-06, | |
| "loss": 0.0054, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.104653485120831, | |
| "grad_norm": 0.29012367129325867, | |
| "learning_rate": 3.198802053622362e-06, | |
| "loss": 0.0018, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.10864789294987, | |
| "grad_norm": 0.08294820040464401, | |
| "learning_rate": 3.1845407872219053e-06, | |
| "loss": 0.0021, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.1126423007789095, | |
| "grad_norm": 0.23702895641326904, | |
| "learning_rate": 3.1702795208214494e-06, | |
| "loss": 0.0017, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.116636708607949, | |
| "grad_norm": 0.1386740505695343, | |
| "learning_rate": 3.1560182544209926e-06, | |
| "loss": 0.0014, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.1206311164369884, | |
| "grad_norm": 0.18367265164852142, | |
| "learning_rate": 3.1417569880205367e-06, | |
| "loss": 0.0027, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.1246255242660275, | |
| "grad_norm": 0.40612369775772095, | |
| "learning_rate": 3.1274957216200804e-06, | |
| "loss": 0.0031, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.128619932095067, | |
| "grad_norm": 1.8997491598129272, | |
| "learning_rate": 3.1132344552196236e-06, | |
| "loss": 0.0016, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.1326143399241064, | |
| "grad_norm": 0.16036643087863922, | |
| "learning_rate": 3.0989731888191677e-06, | |
| "loss": 0.0019, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.1366087477531455, | |
| "grad_norm": 0.1557471603155136, | |
| "learning_rate": 3.084711922418711e-06, | |
| "loss": 0.0022, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.140603155582185, | |
| "grad_norm": 0.12167658656835556, | |
| "learning_rate": 3.0704506560182546e-06, | |
| "loss": 0.0028, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.1445975634112244, | |
| "grad_norm": 0.21444401144981384, | |
| "learning_rate": 3.0561893896177987e-06, | |
| "loss": 0.0033, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.1485919712402635, | |
| "grad_norm": 0.588347852230072, | |
| "learning_rate": 3.041928123217342e-06, | |
| "loss": 0.0019, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.152586379069303, | |
| "grad_norm": 0.1496289074420929, | |
| "learning_rate": 3.0276668568168852e-06, | |
| "loss": 0.0022, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.1565807868983424, | |
| "grad_norm": 0.11246706545352936, | |
| "learning_rate": 3.0134055904164293e-06, | |
| "loss": 0.0019, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.1605751947273815, | |
| "grad_norm": 0.6423232555389404, | |
| "learning_rate": 2.999144324015973e-06, | |
| "loss": 0.0012, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.164569602556421, | |
| "grad_norm": 0.17058268189430237, | |
| "learning_rate": 2.9848830576155162e-06, | |
| "loss": 0.0016, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.1685640103854604, | |
| "grad_norm": 0.4728565216064453, | |
| "learning_rate": 2.9706217912150603e-06, | |
| "loss": 0.0052, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.1725584182145, | |
| "grad_norm": 0.19660444557666779, | |
| "learning_rate": 2.9563605248146036e-06, | |
| "loss": 0.003, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.176552826043539, | |
| "grad_norm": 0.35805192589759827, | |
| "learning_rate": 2.9420992584141473e-06, | |
| "loss": 0.0013, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.1805472338725784, | |
| "grad_norm": 0.5961002111434937, | |
| "learning_rate": 2.9278379920136913e-06, | |
| "loss": 0.0016, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.184541641701618, | |
| "grad_norm": 0.40820205211639404, | |
| "learning_rate": 2.9135767256132346e-06, | |
| "loss": 0.0012, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.188536049530657, | |
| "grad_norm": 0.22801029682159424, | |
| "learning_rate": 2.8993154592127783e-06, | |
| "loss": 0.0019, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.1925304573596964, | |
| "grad_norm": 0.18611891567707062, | |
| "learning_rate": 2.885054192812322e-06, | |
| "loss": 0.0023, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.196524865188736, | |
| "grad_norm": 0.2685863673686981, | |
| "learning_rate": 2.8707929264118656e-06, | |
| "loss": 0.0024, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.200519273017775, | |
| "grad_norm": 0.4682328402996063, | |
| "learning_rate": 2.856531660011409e-06, | |
| "loss": 0.0018, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.2045136808468144, | |
| "grad_norm": 0.11273177713155746, | |
| "learning_rate": 2.842270393610953e-06, | |
| "loss": 0.0013, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.208508088675854, | |
| "grad_norm": 0.3277330994606018, | |
| "learning_rate": 2.8280091272104966e-06, | |
| "loss": 0.0014, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 2.212502496504893, | |
| "grad_norm": 0.12008125334978104, | |
| "learning_rate": 2.81374786081004e-06, | |
| "loss": 0.0014, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 2.2164969043339324, | |
| "grad_norm": 0.07059912383556366, | |
| "learning_rate": 2.799486594409584e-06, | |
| "loss": 0.0038, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 2.220491312162972, | |
| "grad_norm": 0.33425989747047424, | |
| "learning_rate": 2.785225328009127e-06, | |
| "loss": 0.004, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 2.2244857199920114, | |
| "grad_norm": 0.7122307419776917, | |
| "learning_rate": 2.770964061608671e-06, | |
| "loss": 0.0022, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 2.2284801278210504, | |
| "grad_norm": 0.598380982875824, | |
| "learning_rate": 2.756702795208215e-06, | |
| "loss": 0.0019, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 2.23247453565009, | |
| "grad_norm": 0.10900934785604477, | |
| "learning_rate": 2.7424415288077582e-06, | |
| "loss": 0.0014, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 2.2364689434791294, | |
| "grad_norm": 0.11831914633512497, | |
| "learning_rate": 2.728180262407302e-06, | |
| "loss": 0.0032, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 2.2404633513081684, | |
| "grad_norm": 0.15863606333732605, | |
| "learning_rate": 2.7139189960068456e-06, | |
| "loss": 0.0021, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 2.244457759137208, | |
| "grad_norm": 0.8150619864463806, | |
| "learning_rate": 2.6996577296063892e-06, | |
| "loss": 0.0025, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 2.2484521669662474, | |
| "grad_norm": 0.16623784601688385, | |
| "learning_rate": 2.6853964632059333e-06, | |
| "loss": 0.0016, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 2.2524465747952864, | |
| "grad_norm": 0.48935920000076294, | |
| "learning_rate": 2.6711351968054766e-06, | |
| "loss": 0.0013, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 2.256440982624326, | |
| "grad_norm": 0.13353575766086578, | |
| "learning_rate": 2.6568739304050202e-06, | |
| "loss": 0.0014, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 2.2604353904533654, | |
| "grad_norm": 0.10841682553291321, | |
| "learning_rate": 2.642612664004564e-06, | |
| "loss": 0.003, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 2.2644297982824044, | |
| "grad_norm": 0.18046899139881134, | |
| "learning_rate": 2.6283513976041076e-06, | |
| "loss": 0.0018, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 2.268424206111444, | |
| "grad_norm": 0.16014991700649261, | |
| "learning_rate": 2.614090131203651e-06, | |
| "loss": 0.0023, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 2.2724186139404834, | |
| "grad_norm": 0.19527053833007812, | |
| "learning_rate": 2.599828864803195e-06, | |
| "loss": 0.0019, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 2.276413021769523, | |
| "grad_norm": 0.2142389565706253, | |
| "learning_rate": 2.5855675984027386e-06, | |
| "loss": 0.0016, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 2.280407429598562, | |
| "grad_norm": 0.2971132695674896, | |
| "learning_rate": 2.571306332002282e-06, | |
| "loss": 0.0024, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 2.2844018374276014, | |
| "grad_norm": 0.29556071758270264, | |
| "learning_rate": 2.557045065601826e-06, | |
| "loss": 0.003, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 2.288396245256641, | |
| "grad_norm": 0.1596396416425705, | |
| "learning_rate": 2.542783799201369e-06, | |
| "loss": 0.0027, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 2.29239065308568, | |
| "grad_norm": 0.13799555599689484, | |
| "learning_rate": 2.528522532800913e-06, | |
| "loss": 0.0011, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 2.2963850609147194, | |
| "grad_norm": 0.28748640418052673, | |
| "learning_rate": 2.514261266400457e-06, | |
| "loss": 0.0025, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 2.300379468743759, | |
| "grad_norm": 0.06540987640619278, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.0027, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 2.304373876572798, | |
| "grad_norm": 0.2653571665287018, | |
| "learning_rate": 2.485738733599544e-06, | |
| "loss": 0.0008, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 2.3083682844018374, | |
| "grad_norm": 0.1664719134569168, | |
| "learning_rate": 2.4714774671990875e-06, | |
| "loss": 0.0018, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 2.312362692230877, | |
| "grad_norm": 0.041943565011024475, | |
| "learning_rate": 2.4572162007986312e-06, | |
| "loss": 0.0013, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 2.316357100059916, | |
| "grad_norm": 0.23101578652858734, | |
| "learning_rate": 2.442954934398175e-06, | |
| "loss": 0.0024, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 2.3203515078889554, | |
| "grad_norm": 0.5547912120819092, | |
| "learning_rate": 2.428693667997718e-06, | |
| "loss": 0.0019, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 2.324345915717995, | |
| "grad_norm": 1.9931162595748901, | |
| "learning_rate": 2.414432401597262e-06, | |
| "loss": 0.003, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 2.3283403235470344, | |
| "grad_norm": 0.2455291897058487, | |
| "learning_rate": 2.400171135196806e-06, | |
| "loss": 0.0013, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.3323347313760734, | |
| "grad_norm": 0.18468691408634186, | |
| "learning_rate": 2.3859098687963496e-06, | |
| "loss": 0.0038, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.336329139205113, | |
| "grad_norm": 0.2789057493209839, | |
| "learning_rate": 2.371648602395893e-06, | |
| "loss": 0.002, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 2.3403235470341524, | |
| "grad_norm": 0.3302832543849945, | |
| "learning_rate": 2.3573873359954365e-06, | |
| "loss": 0.0021, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 2.3443179548631914, | |
| "grad_norm": 0.3988155722618103, | |
| "learning_rate": 2.34312606959498e-06, | |
| "loss": 0.0018, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 2.348312362692231, | |
| "grad_norm": 0.5014130473136902, | |
| "learning_rate": 2.328864803194524e-06, | |
| "loss": 0.0017, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 2.3523067705212704, | |
| "grad_norm": 0.31072625517845154, | |
| "learning_rate": 2.3146035367940675e-06, | |
| "loss": 0.0023, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 2.3563011783503094, | |
| "grad_norm": 0.13283780217170715, | |
| "learning_rate": 2.300342270393611e-06, | |
| "loss": 0.0015, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 2.360295586179349, | |
| "grad_norm": 0.48449787497520447, | |
| "learning_rate": 2.286081003993155e-06, | |
| "loss": 0.0018, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 2.3642899940083884, | |
| "grad_norm": 0.6114631295204163, | |
| "learning_rate": 2.2718197375926985e-06, | |
| "loss": 0.0018, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 2.3682844018374274, | |
| "grad_norm": 0.3139302730560303, | |
| "learning_rate": 2.257558471192242e-06, | |
| "loss": 0.0011, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 2.372278809666467, | |
| "grad_norm": 0.6530600786209106, | |
| "learning_rate": 2.2432972047917854e-06, | |
| "loss": 0.0027, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 2.3762732174955064, | |
| "grad_norm": 0.30340439081192017, | |
| "learning_rate": 2.229035938391329e-06, | |
| "loss": 0.0013, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 2.380267625324546, | |
| "grad_norm": 0.19712309539318085, | |
| "learning_rate": 2.214774671990873e-06, | |
| "loss": 0.0014, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 2.384262033153585, | |
| "grad_norm": 0.4613218307495117, | |
| "learning_rate": 2.2005134055904164e-06, | |
| "loss": 0.0019, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 2.3882564409826244, | |
| "grad_norm": 0.3004595637321472, | |
| "learning_rate": 2.18625213918996e-06, | |
| "loss": 0.0021, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 2.392250848811664, | |
| "grad_norm": 0.07407541573047638, | |
| "learning_rate": 2.171990872789504e-06, | |
| "loss": 0.0019, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "grad_norm": 0.16345179080963135, | |
| "learning_rate": 2.1577296063890475e-06, | |
| "loss": 0.0012, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.396245256640703, | |
| "eval_loss": 0.006617086939513683, | |
| "eval_runtime": 7437.099, | |
| "eval_samples_per_second": 2.693, | |
| "eval_steps_per_second": 0.337, | |
| "eval_wer": 0.5057546679788949, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.4002396644697424, | |
| "grad_norm": 0.5397946834564209, | |
| "learning_rate": 2.143468339988591e-06, | |
| "loss": 0.0032, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 2.404234072298782, | |
| "grad_norm": 0.1278027594089508, | |
| "learning_rate": 2.129207073588135e-06, | |
| "loss": 0.0011, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 2.408228480127821, | |
| "grad_norm": 0.34275180101394653, | |
| "learning_rate": 2.1149458071876785e-06, | |
| "loss": 0.0011, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 2.4122228879568604, | |
| "grad_norm": 0.25335317850112915, | |
| "learning_rate": 2.100684540787222e-06, | |
| "loss": 0.0024, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 2.4162172957859, | |
| "grad_norm": 0.1778995245695114, | |
| "learning_rate": 2.086423274386766e-06, | |
| "loss": 0.0024, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 2.420211703614939, | |
| "grad_norm": 0.10461229830980301, | |
| "learning_rate": 2.0721620079863095e-06, | |
| "loss": 0.0011, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 2.4242061114439784, | |
| "grad_norm": 0.10788627713918686, | |
| "learning_rate": 2.0579007415858527e-06, | |
| "loss": 0.0022, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 2.428200519273018, | |
| "grad_norm": 0.5851423740386963, | |
| "learning_rate": 2.043639475185397e-06, | |
| "loss": 0.0017, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 2.4321949271020573, | |
| "grad_norm": 0.11152782291173935, | |
| "learning_rate": 2.0293782087849405e-06, | |
| "loss": 0.0013, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 2.4361893349310964, | |
| "grad_norm": 0.9274498224258423, | |
| "learning_rate": 2.0151169423844837e-06, | |
| "loss": 0.0024, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 2.440183742760136, | |
| "grad_norm": 0.10081840306520462, | |
| "learning_rate": 2.0008556759840274e-06, | |
| "loss": 0.0023, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 2.4441781505891753, | |
| "grad_norm": 0.08972252160310745, | |
| "learning_rate": 1.986594409583571e-06, | |
| "loss": 0.0012, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 2.4481725584182144, | |
| "grad_norm": 0.32298198342323303, | |
| "learning_rate": 1.9723331431831148e-06, | |
| "loss": 0.0021, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 2.452166966247254, | |
| "grad_norm": 0.10888329893350601, | |
| "learning_rate": 1.9580718767826584e-06, | |
| "loss": 0.0016, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 2.4561613740762933, | |
| "grad_norm": 9.623656272888184, | |
| "learning_rate": 1.943810610382202e-06, | |
| "loss": 0.0059, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 2.4601557819053324, | |
| "grad_norm": 0.10817322880029678, | |
| "learning_rate": 1.9295493439817458e-06, | |
| "loss": 0.001, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 2.464150189734372, | |
| "grad_norm": 2.2221720218658447, | |
| "learning_rate": 1.9152880775812894e-06, | |
| "loss": 0.0018, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 2.4681445975634113, | |
| "grad_norm": 0.6658632159233093, | |
| "learning_rate": 1.9010268111808331e-06, | |
| "loss": 0.0009, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 2.4721390053924504, | |
| "grad_norm": 0.45321881771087646, | |
| "learning_rate": 1.8867655447803768e-06, | |
| "loss": 0.0019, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 2.47613341322149, | |
| "grad_norm": 0.3559039533138275, | |
| "learning_rate": 1.8725042783799202e-06, | |
| "loss": 0.0009, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 2.4801278210505293, | |
| "grad_norm": 0.1388717144727707, | |
| "learning_rate": 1.858243011979464e-06, | |
| "loss": 0.0018, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 2.484122228879569, | |
| "grad_norm": 1.1052217483520508, | |
| "learning_rate": 1.8439817455790076e-06, | |
| "loss": 0.0019, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 2.488116636708608, | |
| "grad_norm": 0.2527538239955902, | |
| "learning_rate": 1.829720479178551e-06, | |
| "loss": 0.0017, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 2.4921110445376473, | |
| "grad_norm": 0.21638405323028564, | |
| "learning_rate": 1.815459212778095e-06, | |
| "loss": 0.0016, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 2.496105452366687, | |
| "grad_norm": 0.2525235414505005, | |
| "learning_rate": 1.8011979463776386e-06, | |
| "loss": 0.0011, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 2.500099860195726, | |
| "grad_norm": 0.204837828874588, | |
| "learning_rate": 1.786936679977182e-06, | |
| "loss": 0.002, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 2.5040942680247653, | |
| "grad_norm": 0.3697673976421356, | |
| "learning_rate": 1.7726754135767257e-06, | |
| "loss": 0.0035, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 2.508088675853805, | |
| "grad_norm": 1.4454883337020874, | |
| "learning_rate": 1.7584141471762694e-06, | |
| "loss": 0.0019, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 2.5120830836828443, | |
| "grad_norm": 0.2247629016637802, | |
| "learning_rate": 1.7441528807758129e-06, | |
| "loss": 0.0011, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 2.5160774915118833, | |
| "grad_norm": 0.12582184374332428, | |
| "learning_rate": 1.7298916143753565e-06, | |
| "loss": 0.0011, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 2.520071899340923, | |
| "grad_norm": 0.09849651902914047, | |
| "learning_rate": 1.7156303479749004e-06, | |
| "loss": 0.0015, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 2.524066307169962, | |
| "grad_norm": 0.1161109134554863, | |
| "learning_rate": 1.701369081574444e-06, | |
| "loss": 0.0024, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 2.5280607149990013, | |
| "grad_norm": 0.13831503689289093, | |
| "learning_rate": 1.6871078151739875e-06, | |
| "loss": 0.0017, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 2.532055122828041, | |
| "grad_norm": 0.19472339749336243, | |
| "learning_rate": 1.6728465487735312e-06, | |
| "loss": 0.0033, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 2.5360495306570803, | |
| "grad_norm": 0.4595012664794922, | |
| "learning_rate": 1.6585852823730749e-06, | |
| "loss": 0.0015, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 2.5400439384861193, | |
| "grad_norm": 0.09621255099773407, | |
| "learning_rate": 1.6443240159726183e-06, | |
| "loss": 0.0033, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 2.544038346315159, | |
| "grad_norm": 1.4646393060684204, | |
| "learning_rate": 1.6300627495721622e-06, | |
| "loss": 0.0017, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 2.5480327541441983, | |
| "grad_norm": 0.16254696249961853, | |
| "learning_rate": 1.615801483171706e-06, | |
| "loss": 0.0016, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 2.5520271619732373, | |
| "grad_norm": 0.16140027344226837, | |
| "learning_rate": 1.6015402167712494e-06, | |
| "loss": 0.0013, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 2.556021569802277, | |
| "grad_norm": 0.14379070699214935, | |
| "learning_rate": 1.587278950370793e-06, | |
| "loss": 0.0027, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 2.5600159776313163, | |
| "grad_norm": 0.18138810992240906, | |
| "learning_rate": 1.5730176839703367e-06, | |
| "loss": 0.0008, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 2.5640103854603558, | |
| "grad_norm": 0.048751723021268845, | |
| "learning_rate": 1.5587564175698802e-06, | |
| "loss": 0.004, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 2.568004793289395, | |
| "grad_norm": 0.16989260911941528, | |
| "learning_rate": 1.544495151169424e-06, | |
| "loss": 0.0014, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 2.5719992011184343, | |
| "grad_norm": 0.29287806153297424, | |
| "learning_rate": 1.5302338847689677e-06, | |
| "loss": 0.0022, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 2.5759936089474733, | |
| "grad_norm": 0.12781338393688202, | |
| "learning_rate": 1.5159726183685112e-06, | |
| "loss": 0.0013, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 2.579988016776513, | |
| "grad_norm": 0.21234439313411713, | |
| "learning_rate": 1.5017113519680548e-06, | |
| "loss": 0.0013, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 2.5839824246055523, | |
| "grad_norm": 0.5938289761543274, | |
| "learning_rate": 1.4874500855675985e-06, | |
| "loss": 0.0012, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 2.5879768324345918, | |
| "grad_norm": 0.5471766591072083, | |
| "learning_rate": 1.4731888191671424e-06, | |
| "loss": 0.0026, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 2.591971240263631, | |
| "grad_norm": 0.23329491913318634, | |
| "learning_rate": 1.4589275527666856e-06, | |
| "loss": 0.0015, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 2.5959656480926703, | |
| "grad_norm": 1.038935899734497, | |
| "learning_rate": 1.4446662863662295e-06, | |
| "loss": 0.0015, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 2.5999600559217098, | |
| "grad_norm": 0.16698375344276428, | |
| "learning_rate": 1.4304050199657732e-06, | |
| "loss": 0.0013, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 2.603954463750749, | |
| "grad_norm": 0.10930318385362625, | |
| "learning_rate": 1.4161437535653167e-06, | |
| "loss": 0.0024, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 2.6079488715797883, | |
| "grad_norm": 0.24214453995227814, | |
| "learning_rate": 1.4018824871648603e-06, | |
| "loss": 0.0026, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 2.6119432794088278, | |
| "grad_norm": 0.1356784552335739, | |
| "learning_rate": 1.387621220764404e-06, | |
| "loss": 0.002, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 2.6159376872378672, | |
| "grad_norm": 0.10308735817670822, | |
| "learning_rate": 1.3733599543639475e-06, | |
| "loss": 0.0031, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 2.6199320950669063, | |
| "grad_norm": 0.5427199006080627, | |
| "learning_rate": 1.3590986879634913e-06, | |
| "loss": 0.0033, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 2.6239265028959458, | |
| "grad_norm": 0.4210723340511322, | |
| "learning_rate": 1.344837421563035e-06, | |
| "loss": 0.001, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 2.627920910724985, | |
| "grad_norm": 0.14636996388435364, | |
| "learning_rate": 1.3305761551625785e-06, | |
| "loss": 0.0026, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 2.6319153185540243, | |
| "grad_norm": 0.14456219971179962, | |
| "learning_rate": 1.3163148887621221e-06, | |
| "loss": 0.0014, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 2.6359097263830638, | |
| "grad_norm": 0.206597238779068, | |
| "learning_rate": 1.3020536223616658e-06, | |
| "loss": 0.0012, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 2.6399041342121032, | |
| "grad_norm": 0.11497008800506592, | |
| "learning_rate": 1.2877923559612093e-06, | |
| "loss": 0.0016, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 2.6438985420411423, | |
| "grad_norm": 0.18552027642726898, | |
| "learning_rate": 1.2735310895607532e-06, | |
| "loss": 0.0037, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 2.6478929498701818, | |
| "grad_norm": 0.35586246848106384, | |
| "learning_rate": 1.2592698231602968e-06, | |
| "loss": 0.0018, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 2.6518873576992212, | |
| "grad_norm": 0.39738965034484863, | |
| "learning_rate": 1.2450085567598403e-06, | |
| "loss": 0.0026, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 2.6558817655282603, | |
| "grad_norm": 0.4074229300022125, | |
| "learning_rate": 1.230747290359384e-06, | |
| "loss": 0.0012, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 2.6598761733572998, | |
| "grad_norm": 0.08064166456460953, | |
| "learning_rate": 1.2164860239589276e-06, | |
| "loss": 0.0025, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 2.6638705811863392, | |
| "grad_norm": 0.8385900855064392, | |
| "learning_rate": 1.2022247575584713e-06, | |
| "loss": 0.0014, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 2.6678649890153787, | |
| "grad_norm": 0.24224676191806793, | |
| "learning_rate": 1.187963491158015e-06, | |
| "loss": 0.0024, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 2.6718593968444178, | |
| "grad_norm": 0.11100082844495773, | |
| "learning_rate": 1.1737022247575586e-06, | |
| "loss": 0.0015, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 2.6758538046734572, | |
| "grad_norm": 0.49067580699920654, | |
| "learning_rate": 1.159440958357102e-06, | |
| "loss": 0.0017, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 2.6798482125024963, | |
| "grad_norm": 0.17083971202373505, | |
| "learning_rate": 1.145179691956646e-06, | |
| "loss": 0.0034, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 2.6838426203315358, | |
| "grad_norm": 0.16969099640846252, | |
| "learning_rate": 1.1309184255561894e-06, | |
| "loss": 0.0014, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 2.6878370281605752, | |
| "grad_norm": 0.9469853043556213, | |
| "learning_rate": 1.1166571591557331e-06, | |
| "loss": 0.0024, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 2.6918314359896147, | |
| "grad_norm": 0.0797346830368042, | |
| "learning_rate": 1.1023958927552768e-06, | |
| "loss": 0.0026, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 2.6958258438186538, | |
| "grad_norm": 0.1065623015165329, | |
| "learning_rate": 1.0881346263548204e-06, | |
| "loss": 0.0012, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 2.6998202516476932, | |
| "grad_norm": 1.0359104871749878, | |
| "learning_rate": 1.0738733599543641e-06, | |
| "loss": 0.0027, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 2.7038146594767327, | |
| "grad_norm": 0.16578173637390137, | |
| "learning_rate": 1.0596120935539076e-06, | |
| "loss": 0.003, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 2.7078090673057718, | |
| "grad_norm": 0.04761478304862976, | |
| "learning_rate": 1.0453508271534513e-06, | |
| "loss": 0.0024, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 2.7118034751348112, | |
| "grad_norm": 1.9014323949813843, | |
| "learning_rate": 1.031089560752995e-06, | |
| "loss": 0.0023, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 2.7157978829638507, | |
| "grad_norm": 0.3101843297481537, | |
| "learning_rate": 1.0168282943525386e-06, | |
| "loss": 0.0021, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 2.71979229079289, | |
| "grad_norm": 0.282959520816803, | |
| "learning_rate": 1.0025670279520823e-06, | |
| "loss": 0.0018, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 2.7237866986219292, | |
| "grad_norm": 0.06943622976541519, | |
| "learning_rate": 9.88305761551626e-07, | |
| "loss": 0.0012, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 2.7277811064509687, | |
| "grad_norm": 0.3266574740409851, | |
| "learning_rate": 9.740444951511694e-07, | |
| "loss": 0.0019, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 2.7317755142800078, | |
| "grad_norm": 0.3858787715435028, | |
| "learning_rate": 9.597832287507133e-07, | |
| "loss": 0.0019, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 2.7357699221090472, | |
| "grad_norm": 0.28762754797935486, | |
| "learning_rate": 9.455219623502567e-07, | |
| "loss": 0.0013, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 2.7397643299380867, | |
| "grad_norm": 0.1022636890411377, | |
| "learning_rate": 9.312606959498004e-07, | |
| "loss": 0.0015, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 2.743758737767126, | |
| "grad_norm": 0.8769612908363342, | |
| "learning_rate": 9.169994295493441e-07, | |
| "loss": 0.0018, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 2.7477531455961652, | |
| "grad_norm": 0.20930863916873932, | |
| "learning_rate": 9.027381631488876e-07, | |
| "loss": 0.0018, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 2.7517475534252047, | |
| "grad_norm": 0.08737757056951523, | |
| "learning_rate": 8.884768967484313e-07, | |
| "loss": 0.0012, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 2.755741961254244, | |
| "grad_norm": 0.20543090999126434, | |
| "learning_rate": 8.74215630347975e-07, | |
| "loss": 0.001, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 2.7597363690832832, | |
| "grad_norm": 0.21611656248569489, | |
| "learning_rate": 8.599543639475185e-07, | |
| "loss": 0.0038, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 2.7637307769123227, | |
| "grad_norm": 0.1640675812959671, | |
| "learning_rate": 8.456930975470623e-07, | |
| "loss": 0.0027, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 2.767725184741362, | |
| "grad_norm": 0.31564095616340637, | |
| "learning_rate": 8.314318311466059e-07, | |
| "loss": 0.001, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 2.7717195925704017, | |
| "grad_norm": 1.0901868343353271, | |
| "learning_rate": 8.171705647461495e-07, | |
| "loss": 0.0017, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 2.7757140003994407, | |
| "grad_norm": 0.3010713458061218, | |
| "learning_rate": 8.029092983456932e-07, | |
| "loss": 0.001, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 2.77970840822848, | |
| "grad_norm": 0.04908519238233566, | |
| "learning_rate": 7.886480319452368e-07, | |
| "loss": 0.0034, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 2.7837028160575192, | |
| "grad_norm": 0.07861047983169556, | |
| "learning_rate": 7.743867655447804e-07, | |
| "loss": 0.003, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 2.7876972238865587, | |
| "grad_norm": 0.9499324560165405, | |
| "learning_rate": 7.601254991443241e-07, | |
| "loss": 0.0016, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 2.791691631715598, | |
| "grad_norm": 0.11067607998847961, | |
| "learning_rate": 7.458642327438677e-07, | |
| "loss": 0.002, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 2.7956860395446377, | |
| "grad_norm": 0.23731118440628052, | |
| "learning_rate": 7.316029663434114e-07, | |
| "loss": 0.0021, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 2.7996804473736767, | |
| "grad_norm": 0.5374778509140015, | |
| "learning_rate": 7.17341699942955e-07, | |
| "loss": 0.0013, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 2.803674855202716, | |
| "grad_norm": 0.13841059803962708, | |
| "learning_rate": 7.030804335424986e-07, | |
| "loss": 0.0036, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 2.8076692630317557, | |
| "grad_norm": 0.13221804797649384, | |
| "learning_rate": 6.888191671420423e-07, | |
| "loss": 0.0013, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 2.8116636708607947, | |
| "grad_norm": 0.20090872049331665, | |
| "learning_rate": 6.745579007415858e-07, | |
| "loss": 0.0017, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 2.815658078689834, | |
| "grad_norm": 0.06092801317572594, | |
| "learning_rate": 6.602966343411295e-07, | |
| "loss": 0.0015, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 2.8196524865188737, | |
| "grad_norm": 0.7219746112823486, | |
| "learning_rate": 6.460353679406732e-07, | |
| "loss": 0.0014, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 2.823646894347913, | |
| "grad_norm": 0.11110103875398636, | |
| "learning_rate": 6.317741015402168e-07, | |
| "loss": 0.001, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 2.827641302176952, | |
| "grad_norm": 0.09267082810401917, | |
| "learning_rate": 6.175128351397604e-07, | |
| "loss": 0.0013, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 2.8316357100059917, | |
| "grad_norm": 0.19514226913452148, | |
| "learning_rate": 6.032515687393041e-07, | |
| "loss": 0.0027, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 2.8356301178350307, | |
| "grad_norm": 0.09698819369077682, | |
| "learning_rate": 5.889903023388478e-07, | |
| "loss": 0.0034, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 2.83962452566407, | |
| "grad_norm": 1.0999977588653564, | |
| "learning_rate": 5.747290359383914e-07, | |
| "loss": 0.0016, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 2.8436189334931097, | |
| "grad_norm": 0.27861669659614563, | |
| "learning_rate": 5.60467769537935e-07, | |
| "loss": 0.0021, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 2.847613341322149, | |
| "grad_norm": 0.6540320515632629, | |
| "learning_rate": 5.462065031374787e-07, | |
| "loss": 0.0027, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 2.851607749151188, | |
| "grad_norm": 0.719205379486084, | |
| "learning_rate": 5.319452367370223e-07, | |
| "loss": 0.0009, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 2.8556021569802277, | |
| "grad_norm": 0.08803220093250275, | |
| "learning_rate": 5.17683970336566e-07, | |
| "loss": 0.0016, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 2.859596564809267, | |
| "grad_norm": 0.4269472658634186, | |
| "learning_rate": 5.034227039361096e-07, | |
| "loss": 0.0024, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 2.863590972638306, | |
| "grad_norm": 1.9801479578018188, | |
| "learning_rate": 4.891614375356533e-07, | |
| "loss": 0.0021, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 2.8675853804673457, | |
| "grad_norm": 0.14175833761692047, | |
| "learning_rate": 4.7490017113519687e-07, | |
| "loss": 0.001, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 2.871579788296385, | |
| "grad_norm": 0.3901681900024414, | |
| "learning_rate": 4.6063890473474054e-07, | |
| "loss": 0.0024, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 2.8755741961254246, | |
| "grad_norm": 0.20490513741970062, | |
| "learning_rate": 4.463776383342841e-07, | |
| "loss": 0.002, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 2.8795686039544637, | |
| "grad_norm": 0.06986338645219803, | |
| "learning_rate": 4.321163719338278e-07, | |
| "loss": 0.0011, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 2.883563011783503, | |
| "grad_norm": 0.2736944258213043, | |
| "learning_rate": 4.178551055333714e-07, | |
| "loss": 0.0015, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 2.887557419612542, | |
| "grad_norm": 0.39138877391815186, | |
| "learning_rate": 4.0359383913291507e-07, | |
| "loss": 0.0048, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 2.8915518274415817, | |
| "grad_norm": 0.3493710160255432, | |
| "learning_rate": 3.8933257273245863e-07, | |
| "loss": 0.0014, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 2.895546235270621, | |
| "grad_norm": 0.47019582986831665, | |
| "learning_rate": 3.750713063320023e-07, | |
| "loss": 0.0025, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 2.8995406430996606, | |
| "grad_norm": 0.09903858602046967, | |
| "learning_rate": 3.6081003993154597e-07, | |
| "loss": 0.0018, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 2.9035350509286997, | |
| "grad_norm": 0.13944830000400543, | |
| "learning_rate": 3.4654877353108964e-07, | |
| "loss": 0.0013, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 2.907529458757739, | |
| "grad_norm": 0.6667160987854004, | |
| "learning_rate": 3.322875071306332e-07, | |
| "loss": 0.0013, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 2.9115238665867786, | |
| "grad_norm": 0.22438709437847137, | |
| "learning_rate": 3.180262407301769e-07, | |
| "loss": 0.0011, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 2.9155182744158177, | |
| "grad_norm": 0.07875990122556686, | |
| "learning_rate": 3.037649743297205e-07, | |
| "loss": 0.0019, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 2.919512682244857, | |
| "grad_norm": 1.9263901710510254, | |
| "learning_rate": 2.895037079292641e-07, | |
| "loss": 0.0019, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 2.9235070900738966, | |
| "grad_norm": 0.09846038371324539, | |
| "learning_rate": 2.752424415288078e-07, | |
| "loss": 0.0019, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 2.927501497902936, | |
| "grad_norm": 0.10731885582208633, | |
| "learning_rate": 2.609811751283514e-07, | |
| "loss": 0.0012, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 2.931495905731975, | |
| "grad_norm": 0.0784291997551918, | |
| "learning_rate": 2.467199087278951e-07, | |
| "loss": 0.0017, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 2.9354903135610146, | |
| "grad_norm": 0.05892806872725487, | |
| "learning_rate": 2.324586423274387e-07, | |
| "loss": 0.0007, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 2.9394847213900537, | |
| "grad_norm": 1.340722918510437, | |
| "learning_rate": 2.1819737592698234e-07, | |
| "loss": 0.0037, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 2.943479129219093, | |
| "grad_norm": 0.2646709382534027, | |
| "learning_rate": 2.0393610952652596e-07, | |
| "loss": 0.0015, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 2.9474735370481326, | |
| "grad_norm": 0.35851484537124634, | |
| "learning_rate": 1.8967484312606963e-07, | |
| "loss": 0.0009, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 2.951467944877172, | |
| "grad_norm": 0.664960503578186, | |
| "learning_rate": 1.7541357672561324e-07, | |
| "loss": 0.0009, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 2.955462352706211, | |
| "grad_norm": 0.14400076866149902, | |
| "learning_rate": 1.611523103251569e-07, | |
| "loss": 0.0012, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 2.9594567605352506, | |
| "grad_norm": 0.0934806764125824, | |
| "learning_rate": 1.4689104392470053e-07, | |
| "loss": 0.0018, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 2.96345116836429, | |
| "grad_norm": 0.12735722959041595, | |
| "learning_rate": 1.3262977752424415e-07, | |
| "loss": 0.0025, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 2.967445576193329, | |
| "grad_norm": 0.819703221321106, | |
| "learning_rate": 1.183685111237878e-07, | |
| "loss": 0.0014, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 2.9714399840223686, | |
| "grad_norm": 0.10716011375188828, | |
| "learning_rate": 1.0410724472333144e-07, | |
| "loss": 0.0012, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 2.975434391851408, | |
| "grad_norm": 0.9824792742729187, | |
| "learning_rate": 8.984597832287507e-08, | |
| "loss": 0.0023, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 2.9794287996804476, | |
| "grad_norm": 0.20575283467769623, | |
| "learning_rate": 7.558471192241872e-08, | |
| "loss": 0.0019, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 2.9834232075094866, | |
| "grad_norm": 0.10139793157577515, | |
| "learning_rate": 6.132344552196236e-08, | |
| "loss": 0.004, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 2.987417615338526, | |
| "grad_norm": 0.4164970815181732, | |
| "learning_rate": 4.7062179121506e-08, | |
| "loss": 0.0012, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 2.991412023167565, | |
| "grad_norm": 0.10282018035650253, | |
| "learning_rate": 3.280091272104963e-08, | |
| "loss": 0.0019, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 2.9954064309966046, | |
| "grad_norm": 0.08646287769079208, | |
| "learning_rate": 1.853964632059327e-08, | |
| "loss": 0.001, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 2.999400838825644, | |
| "grad_norm": 0.25076162815093994, | |
| "learning_rate": 4.278379920136909e-09, | |
| "loss": 0.0015, | |
| "step": 7510 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 7512, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.9347071696896e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
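
The block above is the complete serialized trainer state (log_history plus the closing config fields such as logging_steps, max_steps, save_steps, total_flos, and train_batch_size). As a minimal sketch of how this structure can be consumed, the snippet below loads the file and summarizes it; the filename "trainer_state.json" is an assumption about where the dump is saved, and every field accessed is one that appears in the state above.

```python
# Minimal sketch: parse a Trainer state dump like the one above and summarize it.
# Assumption: the JSON has been saved to "trainer_state.json" (adjust the path as needed).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

print(f"best checkpoint  : {state['best_model_checkpoint']}")
print(f"best metric      : {state['best_metric']:.4f} at step {state['best_global_step']}")
print(f"total steps      : {state['global_step']} over {state['num_train_epochs']} epochs")

# Training-loss entries in log_history carry a "loss" key (every 'logging_steps' steps).
train_logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in train_logs]
losses = [entry["loss"] for entry in train_logs]
print(f"final logged loss: {losses[-1]} at step {steps[-1]}")
```
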