{
  "best_global_step": 2000,
  "best_metric": 1.2956099257632903,
  "best_model_checkpoint": "./SALAMA_NEW6/checkpoint-2000",
  "epoch": 0.798881565807869,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003994407829039345,
      "grad_norm": 3.5260863304138184,
      "learning_rate": 1.8e-07,
      "loss": 0.0171,
      "step": 10
    },
    {
      "epoch": 0.00798881565807869,
      "grad_norm": 3.8677453994750977,
      "learning_rate": 3.8e-07,
      "loss": 0.0168,
      "step": 20
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 2.2200398445129395,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0143,
      "step": 30
    },
    {
      "epoch": 0.01597763131615738,
      "grad_norm": 2.8865885734558105,
      "learning_rate": 7.8e-07,
      "loss": 0.0148,
      "step": 40
    },
    {
      "epoch": 0.019972039145196723,
      "grad_norm": 2.37961483001709,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0278,
      "step": 50
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 0.550510585308075,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.01,
      "step": 60
    },
    {
      "epoch": 0.027960854803275415,
      "grad_norm": 0.5346644520759583,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0095,
      "step": 70
    },
    {
      "epoch": 0.03195526263231476,
      "grad_norm": 0.3471826910972595,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0102,
      "step": 80
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 0.6894211769104004,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0097,
      "step": 90
    },
    {
      "epoch": 0.039944078290393446,
      "grad_norm": 1.4083952903747559,
      "learning_rate": 1.98e-06,
      "loss": 0.0145,
      "step": 100
    },
    {
      "epoch": 0.043938486119432796,
      "grad_norm": 0.9561022520065308,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0099,
      "step": 110
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 0.5757145285606384,
      "learning_rate": 2.38e-06,
      "loss": 0.0078,
      "step": 120
    },
    {
      "epoch": 0.05192730177751148,
      "grad_norm": 0.9376910924911499,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0135,
      "step": 130
    },
    {
      "epoch": 0.05592170960655083,
      "grad_norm": 1.6390388011932373,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0157,
      "step": 140
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 1.0386844873428345,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0099,
      "step": 150
    },
    {
      "epoch": 0.06391052526462952,
      "grad_norm": 1.7207807302474976,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0096,
      "step": 160
    },
    {
      "epoch": 0.06790493309366886,
      "grad_norm": 0.7114217877388,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.0137,
      "step": 170
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 2.062455654144287,
      "learning_rate": 3.58e-06,
      "loss": 0.0181,
      "step": 180
    },
    {
      "epoch": 0.07589374875174755,
      "grad_norm": 3.8748466968536377,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0155,
      "step": 190
    },
    {
      "epoch": 0.07988815658078689,
      "grad_norm": 2.8697941303253174,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0206,
      "step": 200
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 1.5111762285232544,
      "learning_rate": 4.18e-06,
      "loss": 0.014,
      "step": 210
    },
    {
      "epoch": 0.08787697223886559,
      "grad_norm": 2.7273306846618652,
      "learning_rate": 4.38e-06,
      "loss": 0.0115,
      "step": 220
    },
    {
      "epoch": 0.09187138006790493,
      "grad_norm": 1.604333758354187,
      "learning_rate": 4.58e-06,
      "loss": 0.0111,
      "step": 230
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 1.6656010150909424,
      "learning_rate": 4.78e-06,
      "loss": 0.0103,
      "step": 240
    },
    {
      "epoch": 0.09986019572598362,
      "grad_norm": 1.9466540813446045,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0118,
      "step": 250
    },
    {
      "epoch": 0.10385460355502296,
      "grad_norm": 0.7741212844848633,
      "learning_rate": 5.18e-06,
      "loss": 0.0203,
      "step": 260
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 1.5594321489334106,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0132,
      "step": 270
    },
    {
      "epoch": 0.11184341921310166,
      "grad_norm": 2.1854960918426514,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0227,
      "step": 280
    },
    {
      "epoch": 0.115837827042141,
      "grad_norm": 1.809208869934082,
      "learning_rate": 5.78e-06,
      "loss": 0.014,
      "step": 290
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 2.003018617630005,
      "learning_rate": 5.98e-06,
      "loss": 0.0253,
      "step": 300
    },
    {
      "epoch": 0.12382664270021969,
      "grad_norm": 2.226447582244873,
      "learning_rate": 6.18e-06,
      "loss": 0.0141,
      "step": 310
    },
    {
      "epoch": 0.12782105052925904,
      "grad_norm": 3.213719367980957,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0222,
      "step": 320
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 2.7083969116210938,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0165,
      "step": 330
    },
    {
      "epoch": 0.13580986618733773,
      "grad_norm": 2.327775001525879,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0127,
      "step": 340
    },
    {
      "epoch": 0.13980427401637707,
      "grad_norm": 2.199615478515625,
      "learning_rate": 6.98e-06,
      "loss": 0.0092,
      "step": 350
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 2.6200337409973145,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0229,
      "step": 360
    },
    {
      "epoch": 0.14779308967445576,
      "grad_norm": 2.0899276733398438,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0123,
      "step": 370
    },
    {
      "epoch": 0.1517874975034951,
      "grad_norm": 3.313321113586426,
      "learning_rate": 7.58e-06,
      "loss": 0.0225,
      "step": 380
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 2.0713999271392822,
      "learning_rate": 7.78e-06,
      "loss": 0.0166,
      "step": 390
    },
    {
      "epoch": 0.15977631316157379,
      "grad_norm": 3.2806386947631836,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0143,
      "step": 400
    },
    {
      "epoch": 0.16377072099061313,
      "grad_norm": 2.053285598754883,
      "learning_rate": 8.18e-06,
      "loss": 0.0183,
      "step": 410
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 1.4635854959487915,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0198,
      "step": 420
    },
    {
      "epoch": 0.17175953664869184,
      "grad_norm": 1.461671233177185,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0223,
      "step": 430
    },
    {
      "epoch": 0.17575394447773118,
      "grad_norm": 1.910568118095398,
      "learning_rate": 8.78e-06,
      "loss": 0.0127,
      "step": 440
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 2.9844930171966553,
      "learning_rate": 8.98e-06,
      "loss": 0.0198,
      "step": 450
    },
    {
      "epoch": 0.18374276013580987,
      "grad_norm": 3.3782155513763428,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0277,
      "step": 460
    },
    {
      "epoch": 0.1877371679648492,
      "grad_norm": 2.6012775897979736,
      "learning_rate": 9.38e-06,
      "loss": 0.0154,
      "step": 470
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 2.880096197128296,
      "learning_rate": 9.58e-06,
      "loss": 0.0219,
      "step": 480
    },
    {
      "epoch": 0.1957259836229279,
      "grad_norm": 3.002333879470825,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0179,
      "step": 490
    },
    {
      "epoch": 0.19972039145196724,
      "grad_norm": 6.162044525146484,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0165,
      "step": 500
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 3.4659605026245117,
      "learning_rate": 9.98716486023959e-06,
      "loss": 0.0239,
      "step": 510
    },
    {
      "epoch": 0.20770920711004592,
      "grad_norm": 3.2337770462036133,
      "learning_rate": 9.972903593839133e-06,
      "loss": 0.017,
      "step": 520
    },
    {
      "epoch": 0.2117036149390853,
      "grad_norm": 1.7297512292861938,
      "learning_rate": 9.958642327438678e-06,
      "loss": 0.0197,
      "step": 530
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 3.336660385131836,
      "learning_rate": 9.944381061038221e-06,
      "loss": 0.0215,
      "step": 540
    },
    {
      "epoch": 0.21969243059716398,
      "grad_norm": 1.850265383720398,
      "learning_rate": 9.930119794637765e-06,
      "loss": 0.0259,
      "step": 550
    },
    {
      "epoch": 0.22368683842620332,
      "grad_norm": 2.2153711318969727,
      "learning_rate": 9.91585852823731e-06,
      "loss": 0.0192,
      "step": 560
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 1.2140111923217773,
      "learning_rate": 9.901597261836851e-06,
      "loss": 0.0214,
      "step": 570
    },
    {
      "epoch": 0.231675654084282,
      "grad_norm": 2.983187198638916,
      "learning_rate": 9.887335995436396e-06,
      "loss": 0.0212,
      "step": 580
    },
    {
      "epoch": 0.23567006191332135,
      "grad_norm": 1.3873775005340576,
      "learning_rate": 9.87307472903594e-06,
      "loss": 0.0278,
      "step": 590
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 2.8896610736846924,
      "learning_rate": 9.858813462635483e-06,
      "loss": 0.0188,
      "step": 600
    },
    {
      "epoch": 0.24365887757140003,
      "grad_norm": 3.1558310985565186,
      "learning_rate": 9.844552196235026e-06,
      "loss": 0.0268,
      "step": 610
    },
    {
      "epoch": 0.24765328540043938,
      "grad_norm": 1.7268836498260498,
      "learning_rate": 9.83029092983457e-06,
      "loss": 0.0214,
      "step": 620
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 3.3516862392425537,
      "learning_rate": 9.816029663434114e-06,
      "loss": 0.0264,
      "step": 630
    },
    {
      "epoch": 0.2556421010585181,
      "grad_norm": 2.8272225856781006,
      "learning_rate": 9.801768397033657e-06,
      "loss": 0.0279,
      "step": 640
    },
    {
      "epoch": 0.2596365088875574,
      "grad_norm": 2.9199869632720947,
      "learning_rate": 9.787507130633202e-06,
      "loss": 0.0225,
      "step": 650
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 2.80952787399292,
      "learning_rate": 9.773245864232744e-06,
      "loss": 0.0188,
      "step": 660
    },
    {
      "epoch": 0.2676253245456361,
      "grad_norm": 2.8590474128723145,
      "learning_rate": 9.758984597832289e-06,
      "loss": 0.0214,
      "step": 670
    },
    {
      "epoch": 0.27161973237467546,
      "grad_norm": 2.1354668140411377,
      "learning_rate": 9.744723331431832e-06,
      "loss": 0.0241,
      "step": 680
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 1.54200279712677,
      "learning_rate": 9.730462065031375e-06,
      "loss": 0.0185,
      "step": 690
    },
    {
      "epoch": 0.27960854803275415,
      "grad_norm": 2.3145532608032227,
      "learning_rate": 9.71620079863092e-06,
      "loss": 0.0162,
      "step": 700
    },
    {
      "epoch": 0.2836029558617935,
      "grad_norm": 2.7305352687835693,
      "learning_rate": 9.701939532230463e-06,
      "loss": 0.0246,
      "step": 710
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 1.983603596687317,
      "learning_rate": 9.687678265830007e-06,
      "loss": 0.0236,
      "step": 720
    },
    {
      "epoch": 0.2915917715198722,
      "grad_norm": 1.5119189023971558,
      "learning_rate": 9.67341699942955e-06,
      "loss": 0.02,
      "step": 730
    },
    {
      "epoch": 0.2955861793489115,
      "grad_norm": 2.9574663639068604,
      "learning_rate": 9.659155733029095e-06,
      "loss": 0.024,
      "step": 740
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 2.4619083404541016,
      "learning_rate": 9.644894466628636e-06,
      "loss": 0.0267,
      "step": 750
    },
    {
      "epoch": 0.3035749950069902,
      "grad_norm": 1.9842376708984375,
      "learning_rate": 9.630633200228181e-06,
      "loss": 0.0183,
      "step": 760
    },
    {
      "epoch": 0.30756940283602957,
      "grad_norm": 2.300018310546875,
      "learning_rate": 9.616371933827725e-06,
      "loss": 0.022,
      "step": 770
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 1.9513983726501465,
      "learning_rate": 9.602110667427268e-06,
      "loss": 0.0183,
      "step": 780
    },
    {
      "epoch": 0.31555821849410826,
      "grad_norm": 1.9509272575378418,
      "learning_rate": 9.587849401026813e-06,
      "loss": 0.0239,
      "step": 790
    },
    {
      "epoch": 0.31955262632314757,
      "grad_norm": 2.2186226844787598,
      "learning_rate": 9.573588134626356e-06,
      "loss": 0.0244,
      "step": 800
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 3.058155059814453,
      "learning_rate": 9.5593268682259e-06,
      "loss": 0.0225,
      "step": 810
    },
    {
      "epoch": 0.32754144198122626,
      "grad_norm": 2.130378484725952,
      "learning_rate": 9.545065601825442e-06,
      "loss": 0.0302,
      "step": 820
    },
    {
      "epoch": 0.3315358498102656,
      "grad_norm": 2.9102509021759033,
      "learning_rate": 9.530804335424987e-06,
      "loss": 0.0223,
      "step": 830
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 3.1255991458892822,
      "learning_rate": 9.516543069024529e-06,
      "loss": 0.0199,
      "step": 840
    },
    {
      "epoch": 0.3395246654683443,
      "grad_norm": 1.9874292612075806,
      "learning_rate": 9.502281802624074e-06,
      "loss": 0.0214,
      "step": 850
    },
    {
      "epoch": 0.3435190732973837,
      "grad_norm": 1.9070899486541748,
      "learning_rate": 9.488020536223617e-06,
      "loss": 0.0283,
      "step": 860
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 2.783308744430542,
      "learning_rate": 9.47375926982316e-06,
      "loss": 0.0172,
      "step": 870
    },
    {
      "epoch": 0.35150788895546237,
      "grad_norm": 1.1049232482910156,
      "learning_rate": 9.459498003422705e-06,
      "loss": 0.0261,
      "step": 880
    },
    {
      "epoch": 0.3555022967845017,
      "grad_norm": 3.386366844177246,
      "learning_rate": 9.445236737022249e-06,
      "loss": 0.0214,
      "step": 890
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 1.623195767402649,
      "learning_rate": 9.430975470621792e-06,
      "loss": 0.0221,
      "step": 900
    },
    {
      "epoch": 0.36349111244258037,
      "grad_norm": 3.33766508102417,
      "learning_rate": 9.416714204221335e-06,
      "loss": 0.0245,
      "step": 910
    },
    {
      "epoch": 0.36748552027161974,
      "grad_norm": 3.1946351528167725,
      "learning_rate": 9.40245293782088e-06,
      "loss": 0.0225,
      "step": 920
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 2.7116000652313232,
      "learning_rate": 9.388191671420423e-06,
      "loss": 0.0224,
      "step": 930
    },
    {
      "epoch": 0.3754743359296984,
      "grad_norm": 1.7920254468917847,
      "learning_rate": 9.373930405019966e-06,
      "loss": 0.024,
      "step": 940
    },
    {
      "epoch": 0.3794687437587378,
      "grad_norm": 3.112621545791626,
      "learning_rate": 9.35966913861951e-06,
      "loss": 0.0179,
      "step": 950
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 2.081601142883301,
      "learning_rate": 9.345407872219053e-06,
      "loss": 0.0237,
      "step": 960
    },
    {
      "epoch": 0.3874575594168165,
      "grad_norm": 2.1594202518463135,
      "learning_rate": 9.331146605818598e-06,
      "loss": 0.0286,
      "step": 970
    },
    {
      "epoch": 0.3914519672458558,
      "grad_norm": 2.277575969696045,
      "learning_rate": 9.316885339418141e-06,
      "loss": 0.0199,
      "step": 980
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 2.457400321960449,
      "learning_rate": 9.302624073017684e-06,
      "loss": 0.0148,
      "step": 990
    },
    {
      "epoch": 0.3994407829039345,
      "grad_norm": 2.731627941131592,
      "learning_rate": 9.288362806617228e-06,
      "loss": 0.026,
      "step": 1000
    },
    {
      "epoch": 0.40343519073297385,
      "grad_norm": 2.26240873336792,
      "learning_rate": 9.274101540216773e-06,
      "loss": 0.0205,
      "step": 1010
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 3.270246982574463,
      "learning_rate": 9.259840273816316e-06,
      "loss": 0.0245,
      "step": 1020
    },
    {
      "epoch": 0.41142400639105253,
      "grad_norm": 2.118734836578369,
      "learning_rate": 9.245579007415859e-06,
      "loss": 0.0244,
      "step": 1030
    },
    {
      "epoch": 0.41541841422009185,
      "grad_norm": 2.5819344520568848,
      "learning_rate": 9.231317741015402e-06,
      "loss": 0.0204,
      "step": 1040
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 3.1689200401306152,
      "learning_rate": 9.217056474614946e-06,
      "loss": 0.0283,
      "step": 1050
    },
    {
      "epoch": 0.4234072298781706,
      "grad_norm": 2.6276655197143555,
      "learning_rate": 9.20279520821449e-06,
      "loss": 0.022,
      "step": 1060
    },
    {
      "epoch": 0.4274016377072099,
      "grad_norm": 3.010298013687134,
      "learning_rate": 9.188533941814034e-06,
      "loss": 0.0355,
      "step": 1070
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 2.780543327331543,
      "learning_rate": 9.174272675413579e-06,
      "loss": 0.0341,
      "step": 1080
    },
    {
      "epoch": 0.4353904533652886,
      "grad_norm": 2.277057647705078,
      "learning_rate": 9.16001140901312e-06,
      "loss": 0.0239,
      "step": 1090
    },
    {
      "epoch": 0.43938486119432796,
      "grad_norm": 1.640129566192627,
      "learning_rate": 9.145750142612665e-06,
      "loss": 0.0228,
      "step": 1100
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 3.9390289783477783,
      "learning_rate": 9.131488876212208e-06,
      "loss": 0.0299,
      "step": 1110
    },
    {
      "epoch": 0.44737367685240664,
      "grad_norm": 1.726510763168335,
      "learning_rate": 9.117227609811752e-06,
      "loss": 0.0207,
      "step": 1120
    },
    {
      "epoch": 0.45136808468144596,
      "grad_norm": 1.583237648010254,
      "learning_rate": 9.102966343411297e-06,
      "loss": 0.018,
      "step": 1130
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 3.3998148441314697,
      "learning_rate": 9.08870507701084e-06,
      "loss": 0.0333,
      "step": 1140
    },
    {
      "epoch": 0.45935690033952464,
      "grad_norm": 1.7249724864959717,
      "learning_rate": 9.074443810610383e-06,
      "loss": 0.0244,
      "step": 1150
    },
    {
      "epoch": 0.463351308168564,
      "grad_norm": 2.0827536582946777,
      "learning_rate": 9.060182544209926e-06,
      "loss": 0.0295,
      "step": 1160
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 2.095423698425293,
      "learning_rate": 9.045921277809471e-06,
      "loss": 0.025,
      "step": 1170
    },
    {
      "epoch": 0.4713401238266427,
      "grad_norm": 2.7629899978637695,
      "learning_rate": 9.031660011409013e-06,
      "loss": 0.0139,
      "step": 1180
    },
    {
      "epoch": 0.47533453165568207,
      "grad_norm": 1.6056689023971558,
      "learning_rate": 9.017398745008558e-06,
      "loss": 0.0379,
      "step": 1190
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 2.5653021335601807,
      "learning_rate": 9.003137478608101e-06,
      "loss": 0.0236,
      "step": 1200
    },
    {
      "epoch": 0.48332334731376075,
      "grad_norm": 2.9385814666748047,
      "learning_rate": 8.988876212207644e-06,
      "loss": 0.035,
      "step": 1210
    },
    {
      "epoch": 0.48731775514280007,
      "grad_norm": 3.0189478397369385,
      "learning_rate": 8.97461494580719e-06,
      "loss": 0.0187,
      "step": 1220
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 1.1368498802185059,
      "learning_rate": 8.960353679406733e-06,
      "loss": 0.0205,
      "step": 1230
    },
    {
      "epoch": 0.49530657080087875,
      "grad_norm": 2.0715737342834473,
      "learning_rate": 8.946092413006276e-06,
      "loss": 0.021,
      "step": 1240
    },
    {
      "epoch": 0.4993009786299181,
      "grad_norm": 2.2337212562561035,
      "learning_rate": 8.931831146605819e-06,
      "loss": 0.0216,
      "step": 1250
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 1.4322441816329956,
      "learning_rate": 8.917569880205364e-06,
      "loss": 0.0188,
      "step": 1260
    },
    {
      "epoch": 0.5072897942879968,
      "grad_norm": 1.9030108451843262,
      "learning_rate": 8.903308613804906e-06,
      "loss": 0.0177,
      "step": 1270
    },
    {
      "epoch": 0.5112842021170362,
      "grad_norm": 2.806227684020996,
      "learning_rate": 8.88904734740445e-06,
      "loss": 0.0166,
      "step": 1280
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 3.692470073699951,
      "learning_rate": 8.874786081003994e-06,
      "loss": 0.032,
      "step": 1290
    },
    {
      "epoch": 0.5192730177751148,
      "grad_norm": 2.1846556663513184,
      "learning_rate": 8.860524814603537e-06,
      "loss": 0.0203,
      "step": 1300
    },
    {
      "epoch": 0.5232674256041542,
      "grad_norm": 2.7069506645202637,
      "learning_rate": 8.846263548203082e-06,
      "loss": 0.0216,
      "step": 1310
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 3.010519027709961,
      "learning_rate": 8.832002281802625e-06,
      "loss": 0.0291,
      "step": 1320
    },
    {
      "epoch": 0.5312562412622329,
      "grad_norm": 2.396578073501587,
      "learning_rate": 8.817741015402168e-06,
      "loss": 0.0315,
      "step": 1330
    },
    {
      "epoch": 0.5352506490912722,
      "grad_norm": 3.0465903282165527,
      "learning_rate": 8.803479749001712e-06,
      "loss": 0.0229,
      "step": 1340
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 3.0167880058288574,
      "learning_rate": 8.789218482601257e-06,
      "loss": 0.0237,
      "step": 1350
    },
    {
      "epoch": 0.5432394647493509,
      "grad_norm": 2.1383025646209717,
      "learning_rate": 8.7749572162008e-06,
      "loss": 0.0204,
      "step": 1360
    },
    {
      "epoch": 0.5472338725783903,
      "grad_norm": 4.080812931060791,
      "learning_rate": 8.760695949800343e-06,
      "loss": 0.0289,
      "step": 1370
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 1.5765025615692139,
      "learning_rate": 8.746434683399886e-06,
      "loss": 0.0266,
      "step": 1380
    },
    {
      "epoch": 0.5552226882364689,
      "grad_norm": 2.26448917388916,
      "learning_rate": 8.73217341699943e-06,
      "loss": 0.0209,
      "step": 1390
    },
    {
      "epoch": 0.5592170960655083,
      "grad_norm": 2.3659119606018066,
      "learning_rate": 8.717912150598975e-06,
      "loss": 0.027,
      "step": 1400
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 4.025057315826416,
      "learning_rate": 8.703650884198518e-06,
      "loss": 0.0349,
      "step": 1410
    },
    {
      "epoch": 0.567205911723587,
      "grad_norm": 1.6939404010772705,
      "learning_rate": 8.689389617798061e-06,
      "loss": 0.0164,
      "step": 1420
    },
    {
      "epoch": 0.5712003195526263,
      "grad_norm": 2.2709405422210693,
      "learning_rate": 8.675128351397604e-06,
      "loss": 0.0239,
      "step": 1430
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 1.9596635103225708,
      "learning_rate": 8.66086708499715e-06,
      "loss": 0.0223,
      "step": 1440
    },
    {
      "epoch": 0.579189135210705,
      "grad_norm": 1.6292943954467773,
      "learning_rate": 8.646605818596692e-06,
      "loss": 0.0211,
      "step": 1450
    },
    {
      "epoch": 0.5831835430397444,
      "grad_norm": 2.063108205795288,
      "learning_rate": 8.632344552196236e-06,
      "loss": 0.0187,
      "step": 1460
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 2.243898868560791,
      "learning_rate": 8.618083285795779e-06,
      "loss": 0.0201,
      "step": 1470
    },
    {
      "epoch": 0.591172358697823,
      "grad_norm": 2.1789815425872803,
      "learning_rate": 8.603822019395322e-06,
      "loss": 0.0275,
      "step": 1480
    },
    {
      "epoch": 0.5951667665268624,
      "grad_norm": 2.9697136878967285,
      "learning_rate": 8.589560752994867e-06,
      "loss": 0.0265,
      "step": 1490
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 2.9582135677337646,
      "learning_rate": 8.57529948659441e-06,
      "loss": 0.0222,
      "step": 1500
    },
    {
      "epoch": 0.603155582184941,
      "grad_norm": 3.5698182582855225,
      "learning_rate": 8.561038220193954e-06,
      "loss": 0.0356,
      "step": 1510
    },
    {
      "epoch": 0.6071499900139804,
      "grad_norm": 1.6024861335754395,
      "learning_rate": 8.546776953793497e-06,
      "loss": 0.0262,
      "step": 1520
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 3.0098986625671387,
      "learning_rate": 8.532515687393042e-06,
      "loss": 0.0193,
      "step": 1530
    },
    {
      "epoch": 0.6151388056720591,
      "grad_norm": 2.4644837379455566,
      "learning_rate": 8.518254420992585e-06,
      "loss": 0.0228,
      "step": 1540
    },
    {
      "epoch": 0.6191332135010985,
      "grad_norm": 1.8468703031539917,
      "learning_rate": 8.503993154592128e-06,
      "loss": 0.0205,
      "step": 1550
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 1.8541923761367798,
      "learning_rate": 8.489731888191672e-06,
      "loss": 0.0288,
      "step": 1560
    },
    {
      "epoch": 0.6271220291591771,
      "grad_norm": 2.5148401260375977,
      "learning_rate": 8.475470621791215e-06,
      "loss": 0.0236,
      "step": 1570
    },
    {
      "epoch": 0.6311164369882165,
      "grad_norm": 1.897991418838501,
      "learning_rate": 8.46120935539076e-06,
      "loss": 0.0223,
      "step": 1580
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 2.4308152198791504,
      "learning_rate": 8.446948088990303e-06,
      "loss": 0.0212,
      "step": 1590
    },
    {
      "epoch": 0.6391052526462951,
      "grad_norm": 2.6001315116882324,
      "learning_rate": 8.432686822589846e-06,
      "loss": 0.0305,
      "step": 1600
    },
    {
      "epoch": 0.6430996604753345,
      "grad_norm": 2.6471099853515625,
      "learning_rate": 8.41842555618939e-06,
      "loss": 0.0217,
      "step": 1610
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 1.336553692817688,
      "learning_rate": 8.404164289788934e-06,
      "loss": 0.0219,
      "step": 1620
    },
    {
      "epoch": 0.6510884761334133,
      "grad_norm": 2.2210710048675537,
      "learning_rate": 8.389903023388478e-06,
      "loss": 0.0229,
      "step": 1630
    },
    {
      "epoch": 0.6550828839624525,
      "grad_norm": 1.4983543157577515,
      "learning_rate": 8.375641756988021e-06,
      "loss": 0.0229,
      "step": 1640
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 4.39826774597168,
      "learning_rate": 8.361380490587566e-06,
      "loss": 0.0258,
      "step": 1650
    },
    {
      "epoch": 0.6630716996205313,
      "grad_norm": 1.8835957050323486,
      "learning_rate": 8.347119224187107e-06,
      "loss": 0.0252,
      "step": 1660
    },
    {
      "epoch": 0.6670661074495706,
      "grad_norm": 2.090482473373413,
      "learning_rate": 8.332857957786652e-06,
      "loss": 0.0249,
      "step": 1670
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 4.0251336097717285,
      "learning_rate": 8.318596691386196e-06,
      "loss": 0.0358,
      "step": 1680
    },
    {
      "epoch": 0.6750549231076493,
      "grad_norm": 2.3550221920013428,
      "learning_rate": 8.304335424985739e-06,
      "loss": 0.0257,
      "step": 1690
    },
    {
      "epoch": 0.6790493309366886,
      "grad_norm": 3.605212926864624,
      "learning_rate": 8.290074158585282e-06,
      "loss": 0.0243,
      "step": 1700
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 3.9534785747528076,
      "learning_rate": 8.275812892184827e-06,
      "loss": 0.021,
      "step": 1710
    },
    {
      "epoch": 0.6870381465947674,
      "grad_norm": 2.5969812870025635,
      "learning_rate": 8.26155162578437e-06,
      "loss": 0.0202,
      "step": 1720
    },
    {
      "epoch": 0.6910325544238066,
      "grad_norm": 2.922255277633667,
      "learning_rate": 8.247290359383914e-06,
      "loss": 0.0183,
      "step": 1730
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 2.6963930130004883,
      "learning_rate": 8.233029092983458e-06,
      "loss": 0.0288,
      "step": 1740
    },
    {
      "epoch": 0.6990213700818854,
      "grad_norm": 1.4426485300064087,
      "learning_rate": 8.218767826583002e-06,
      "loss": 0.0267,
      "step": 1750
    },
    {
      "epoch": 0.7030157779109247,
      "grad_norm": 1.6854075193405151,
      "learning_rate": 8.204506560182545e-06,
      "loss": 0.0189,
      "step": 1760
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 2.4257256984710693,
      "learning_rate": 8.190245293782088e-06,
      "loss": 0.0256,
      "step": 1770
    },
    {
      "epoch": 0.7110045935690034,
      "grad_norm": 2.0894641876220703,
      "learning_rate": 8.175984027381633e-06,
      "loss": 0.02,
      "step": 1780
    },
    {
      "epoch": 0.7149990013980427,
      "grad_norm": 2.5761284828186035,
      "learning_rate": 8.161722760981175e-06,
      "loss": 0.0205,
      "step": 1790
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 4.715319633483887,
      "learning_rate": 8.14746149458072e-06,
      "loss": 0.0219,
      "step": 1800
    },
    {
      "epoch": 0.7229878170561215,
      "grad_norm": 2.933713674545288,
      "learning_rate": 8.133200228180263e-06,
      "loss": 0.0215,
      "step": 1810
    },
    {
      "epoch": 0.7269822248851607,
      "grad_norm": 2.0981826782226562,
      "learning_rate": 8.118938961779806e-06,
      "loss": 0.0193,
      "step": 1820
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 3.5366568565368652,
      "learning_rate": 8.104677695379351e-06,
      "loss": 0.0287,
      "step": 1830
    },
    {
      "epoch": 0.7349710405432395,
      "grad_norm": 2.165713310241699,
      "learning_rate": 8.090416428978894e-06,
      "loss": 0.0183,
      "step": 1840
    },
    {
      "epoch": 0.7389654483722788,
      "grad_norm": 3.0615482330322266,
      "learning_rate": 8.076155162578438e-06,
      "loss": 0.0233,
      "step": 1850
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 2.1526572704315186,
      "learning_rate": 8.06189389617798e-06,
      "loss": 0.0316,
      "step": 1860
    },
    {
      "epoch": 0.7469542640303575,
      "grad_norm": 1.9894167184829712,
      "learning_rate": 8.047632629777526e-06,
      "loss": 0.0214,
      "step": 1870
    },
    {
      "epoch": 0.7509486718593968,
      "grad_norm": 1.5561473369598389,
      "learning_rate": 8.033371363377069e-06,
      "loss": 0.0246,
      "step": 1880
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 2.030789375305176,
      "learning_rate": 8.019110096976612e-06,
      "loss": 0.028,
      "step": 1890
    },
    {
      "epoch": 0.7589374875174756,
      "grad_norm": 2.139389991760254,
      "learning_rate": 8.004848830576156e-06,
      "loss": 0.0192,
      "step": 1900
    },
    {
      "epoch": 0.7629318953465148,
      "grad_norm": 1.4104957580566406,
      "learning_rate": 7.990587564175699e-06,
      "loss": 0.0214,
      "step": 1910
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 3.027559280395508,
      "learning_rate": 7.976326297775244e-06,
      "loss": 0.0225,
      "step": 1920
    },
    {
      "epoch": 0.7709207110045936,
      "grad_norm": 2.3780980110168457,
      "learning_rate": 7.962065031374787e-06,
      "loss": 0.0231,
      "step": 1930
    },
    {
      "epoch": 0.774915118833633,
      "grad_norm": 2.3442487716674805,
      "learning_rate": 7.94780376497433e-06,
      "loss": 0.0223,
      "step": 1940
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 2.0246195793151855,
      "learning_rate": 7.933542498573873e-06,
      "loss": 0.0164,
      "step": 1950
    },
    {
      "epoch": 0.7829039344917116,
      "grad_norm": 1.8316524028778076,
      "learning_rate": 7.919281232173418e-06,
      "loss": 0.019,
      "step": 1960
    },
    {
      "epoch": 0.786898342320751,
      "grad_norm": 1.7914390563964844,
      "learning_rate": 7.905019965772962e-06,
      "loss": 0.0178,
      "step": 1970
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 1.9735348224639893,
      "learning_rate": 7.890758699372505e-06,
      "loss": 0.0144,
      "step": 1980
    },
    {
      "epoch": 0.7948871579788296,
      "grad_norm": 1.8981409072875977,
      "learning_rate": 7.876497432972048e-06,
      "loss": 0.0244,
      "step": 1990
    },
    {
      "epoch": 0.798881565807869,
      "grad_norm": 1.4974066019058228,
      "learning_rate": 7.862236166571591e-06,
      "loss": 0.0239,
      "step": 2000
    },
    {
      "epoch": 0.798881565807869,
      "eval_loss": 0.014320253394544125,
      "eval_runtime": 7268.4159,
      "eval_samples_per_second": 2.755,
      "eval_steps_per_second": 0.345,
      "eval_wer": 1.2956099257632903,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 7512,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.846946562048e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
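
For reference, a minimal sketch of how a Hugging Face Trainer state file like the one above can be inspected offline. It assumes the JSON is saved as `trainer_state.json` in the working directory (the filename is illustrative) and uses only the Python standard library; training logs are distinguished from evaluation records by the presence of a `loss` versus `eval_loss` key, which matches the structure of `log_history` here.

```python
# Sketch: summarize a Trainer state file (assumed path: trainer_state.json).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Periodic training logs carry "loss"; evaluation records carry "eval_*" keys.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"best checkpoint: {state['best_model_checkpoint']} "
      f"(metric {state['best_metric']:.4f})")
print(f"progress: step {state['global_step']}/{state['max_steps']}, "
      f"epoch {state['epoch']:.3f} of {state['num_train_epochs']}")

# Mean training loss over the last 10 logged intervals
# (with logging_steps = 10, that covers the last 100 optimizer steps).
recent = [e["loss"] for e in train_logs[-10:]]
print(f"mean loss, last {len(recent)} logs: {sum(recent) / len(recent):.4f}")

for e in eval_logs:
    print(f"step {e['step']}: eval_loss={e['eval_loss']:.4f}, "
          f"eval_wer={e['eval_wer']:.4f}")
```

Note that `best_metric` equals `eval_wer` at step 2000, so this run selects checkpoints by WER; a value above 1.0 means more word errors than reference words, which is worth double-checking against the WER computation in the evaluation code.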