{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 1946,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.010277492291880781,
"grad_norm": 1.4090419657971816,
"learning_rate": 3.0612244897959185e-06,
"loss": 0.7119,
"step": 10
},
{
"epoch": 0.020554984583761562,
"grad_norm": 1.5528273772375807,
"learning_rate": 6.122448979591837e-06,
"loss": 0.6333,
"step": 20
},
{
"epoch": 0.030832476875642344,
"grad_norm": 0.6875726812709237,
"learning_rate": 9.183673469387756e-06,
"loss": 0.609,
"step": 30
},
{
"epoch": 0.041109969167523124,
"grad_norm": 0.5630238576069669,
"learning_rate": 1.2244897959183674e-05,
"loss": 0.5998,
"step": 40
},
{
"epoch": 0.051387461459403906,
"grad_norm": 0.49796436120814924,
"learning_rate": 1.530612244897959e-05,
"loss": 0.5692,
"step": 50
},
{
"epoch": 0.06166495375128469,
"grad_norm": 0.48386904448400564,
"learning_rate": 1.836734693877551e-05,
"loss": 0.5833,
"step": 60
},
{
"epoch": 0.07194244604316546,
"grad_norm": 0.5317730488962762,
"learning_rate": 2.1428571428571428e-05,
"loss": 0.5632,
"step": 70
},
{
"epoch": 0.08221993833504625,
"grad_norm": 0.4884815339298427,
"learning_rate": 2.448979591836735e-05,
"loss": 0.5804,
"step": 80
},
{
"epoch": 0.09249743062692703,
"grad_norm": 0.4738318035314195,
"learning_rate": 2.7551020408163265e-05,
"loss": 0.5805,
"step": 90
},
{
"epoch": 0.10277492291880781,
"grad_norm": 0.5201543078674129,
"learning_rate": 2.9999219710414462e-05,
"loss": 0.5904,
"step": 100
},
{
"epoch": 0.1130524152106886,
"grad_norm": 0.5194572571460542,
"learning_rate": 2.9971918097900504e-05,
"loss": 0.5891,
"step": 110
},
{
"epoch": 0.12332990750256938,
"grad_norm": 0.5451888545802674,
"learning_rate": 2.9905683148398642e-05,
"loss": 0.5912,
"step": 120
},
{
"epoch": 0.13360739979445016,
"grad_norm": 0.4551734320613588,
"learning_rate": 2.9800687100869334e-05,
"loss": 0.5834,
"step": 130
},
{
"epoch": 0.14388489208633093,
"grad_norm": 0.4201748639994036,
"learning_rate": 2.9657202989567393e-05,
"loss": 0.564,
"step": 140
},
{
"epoch": 0.15416238437821173,
"grad_norm": 0.39502869903115356,
"learning_rate": 2.94756039340371e-05,
"loss": 0.5842,
"step": 150
},
{
"epoch": 0.1644398766700925,
"grad_norm": 0.45207720954882885,
"learning_rate": 2.9256362168843153e-05,
"loss": 0.5748,
"step": 160
},
{
"epoch": 0.1747173689619733,
"grad_norm": 0.5380473565961726,
"learning_rate": 2.90000478155605e-05,
"loss": 0.5773,
"step": 170
},
{
"epoch": 0.18499486125385406,
"grad_norm": 0.43815842467576244,
"learning_rate": 2.870732740021648e-05,
"loss": 0.5639,
"step": 180
},
{
"epoch": 0.19527235354573483,
"grad_norm": 0.4809575724437436,
"learning_rate": 2.837896212004041e-05,
"loss": 0.5681,
"step": 190
},
{
"epoch": 0.20554984583761562,
"grad_norm": 0.43270245528917933,
"learning_rate": 2.801580586402798e-05,
"loss": 0.5714,
"step": 200
},
{
"epoch": 0.2158273381294964,
"grad_norm": 0.430262537558555,
"learning_rate": 2.7618802992467718e-05,
"loss": 0.5576,
"step": 210
},
{
"epoch": 0.2261048304213772,
"grad_norm": 0.44110443094179325,
"learning_rate": 2.7188985881203685e-05,
"loss": 0.5691,
"step": 220
},
{
"epoch": 0.23638232271325796,
"grad_norm": 0.4032486283659276,
"learning_rate": 2.672747223702045e-05,
"loss": 0.5762,
"step": 230
},
{
"epoch": 0.24665981500513876,
"grad_norm": 0.4242787937283615,
"learning_rate": 2.623546219113139e-05,
"loss": 0.5643,
"step": 240
},
{
"epoch": 0.2569373072970195,
"grad_norm": 0.4836565922888775,
"learning_rate": 2.5714235178328554e-05,
"loss": 0.5774,
"step": 250
},
{
"epoch": 0.2672147995889003,
"grad_norm": 0.42526900764591785,
"learning_rate": 2.516514660990954e-05,
"loss": 0.5627,
"step": 260
},
{
"epoch": 0.2774922918807811,
"grad_norm": 0.3683992684534281,
"learning_rate": 2.4589624349033273e-05,
"loss": 0.5594,
"step": 270
},
{
"epoch": 0.28776978417266186,
"grad_norm": 0.41496288894802913,
"learning_rate": 2.3989164997670207e-05,
"loss": 0.5562,
"step": 280
},
{
"epoch": 0.29804727646454265,
"grad_norm": 0.3635108549251573,
"learning_rate": 2.3365330004802443e-05,
"loss": 0.5301,
"step": 290
},
{
"epoch": 0.30832476875642345,
"grad_norm": 0.3913157769758501,
"learning_rate": 2.271974160599415e-05,
"loss": 0.5506,
"step": 300
},
{
"epoch": 0.3186022610483042,
"grad_norm": 0.4061061729653975,
"learning_rate": 2.2054078604891052e-05,
"loss": 0.5517,
"step": 310
},
{
"epoch": 0.328879753340185,
"grad_norm": 0.38228520965937396,
"learning_rate": 2.1370072007618947e-05,
"loss": 0.5622,
"step": 320
},
{
"epoch": 0.3391572456320658,
"grad_norm": 0.4320279036786554,
"learning_rate": 2.06695005214336e-05,
"loss": 0.5447,
"step": 330
},
{
"epoch": 0.3494347379239466,
"grad_norm": 0.37703622035095746,
"learning_rate": 1.995418592932751e-05,
"loss": 0.541,
"step": 340
},
{
"epoch": 0.3597122302158273,
"grad_norm": 0.34784635261686314,
"learning_rate": 1.9225988352621445e-05,
"loss": 0.5433,
"step": 350
},
{
"epoch": 0.3699897225077081,
"grad_norm": 0.40884262314646,
"learning_rate": 1.8486801413860122e-05,
"loss": 0.5416,
"step": 360
},
{
"epoch": 0.3802672147995889,
"grad_norm": 0.3872918353959618,
"learning_rate": 1.7738547312590426e-05,
"loss": 0.5455,
"step": 370
},
{
"epoch": 0.39054470709146966,
"grad_norm": 0.3909142802295015,
"learning_rate": 1.6983171826827357e-05,
"loss": 0.5331,
"step": 380
},
{
"epoch": 0.40082219938335045,
"grad_norm": 0.4247312946608561,
"learning_rate": 1.6222639253205944e-05,
"loss": 0.5468,
"step": 390
},
{
"epoch": 0.41109969167523125,
"grad_norm": 0.3779654218810271,
"learning_rate": 1.5458927298976893e-05,
"loss": 0.5274,
"step": 400
},
{
"epoch": 0.42137718396711205,
"grad_norm": 0.3835548049313215,
"learning_rate": 1.4694021939128921e-05,
"loss": 0.5433,
"step": 410
},
{
"epoch": 0.4316546762589928,
"grad_norm": 0.3430867289416931,
"learning_rate": 1.3929912252011512e-05,
"loss": 0.5292,
"step": 420
},
{
"epoch": 0.4419321685508736,
"grad_norm": 0.33530951933279407,
"learning_rate": 1.3168585246887604e-05,
"loss": 0.5288,
"step": 430
},
{
"epoch": 0.4522096608427544,
"grad_norm": 0.38479043994508066,
"learning_rate": 1.2412020696866724e-05,
"loss": 0.5356,
"step": 440
},
{
"epoch": 0.4624871531346352,
"grad_norm": 0.38638629022508425,
"learning_rate": 1.1662185990655285e-05,
"loss": 0.5491,
"step": 450
},
{
"epoch": 0.4727646454265159,
"grad_norm": 0.35722768346449285,
"learning_rate": 1.0921031016511509e-05,
"loss": 0.5253,
"step": 460
},
{
"epoch": 0.4830421377183967,
"grad_norm": 0.3735863030025159,
"learning_rate": 1.0190483091708966e-05,
"loss": 0.5246,
"step": 470
},
{
"epoch": 0.4933196300102775,
"grad_norm": 0.38885388838001816,
"learning_rate": 9.472441950694257e-06,
"loss": 0.5311,
"step": 480
},
{
"epoch": 0.5035971223021583,
"grad_norm": 0.3260936892474308,
"learning_rate": 8.768774804971705e-06,
"loss": 0.5341,
"step": 490
},
{
"epoch": 0.513874614594039,
"grad_norm": 0.3552425546911982,
"learning_rate": 8.081311487561545e-06,
"loss": 0.5141,
"step": 500
},
{
"epoch": 0.5241521068859198,
"grad_norm": 0.34385819298403936,
"learning_rate": 7.411839694657976e-06,
"loss": 0.5165,
"step": 510
},
{
"epoch": 0.5344295991778006,
"grad_norm": 0.3585285471621075,
"learning_rate": 6.762100336860873e-06,
"loss": 0.5085,
"step": 520
},
{
"epoch": 0.5447070914696814,
"grad_norm": 0.3607978964114949,
"learning_rate": 6.133783012069853e-06,
"loss": 0.5158,
"step": 530
},
{
"epoch": 0.5549845837615622,
"grad_norm": 0.3297200319919343,
"learning_rate": 5.528521611813133e-06,
"loss": 0.5313,
"step": 540
},
{
"epoch": 0.5652620760534429,
"grad_norm": 0.3379144676775949,
"learning_rate": 4.947890072436609e-06,
"loss": 0.5193,
"step": 550
},
{
"epoch": 0.5755395683453237,
"grad_norm": 0.333040604472348,
"learning_rate": 4.393398282201788e-06,
"loss": 0.5108,
"step": 560
},
{
"epoch": 0.5858170606372045,
"grad_norm": 0.3389958077738962,
"learning_rate": 3.866488154935951e-06,
"loss": 0.5356,
"step": 570
},
{
"epoch": 0.5960945529290853,
"grad_norm": 0.3518113489771542,
"learning_rate": 3.3685298804446406e-06,
"loss": 0.4915,
"step": 580
},
{
"epoch": 0.6063720452209661,
"grad_norm": 0.39595048545111644,
"learning_rate": 2.900818361437053e-06,
"loss": 0.5135,
"step": 590
},
{
"epoch": 0.6166495375128469,
"grad_norm": 0.3359427514687364,
"learning_rate": 2.4645698462297583e-06,
"loss": 0.5052,
"step": 600
},
{
"epoch": 0.6269270298047277,
"grad_norm": 0.3227737246533257,
"learning_rate": 2.0609187659852914e-06,
"loss": 0.5023,
"step": 610
},
{
"epoch": 0.6372045220966084,
"grad_norm": 0.32500420569729616,
"learning_rate": 1.6909147847099526e-06,
"loss": 0.5359,
"step": 620
},
{
"epoch": 0.6474820143884892,
"grad_norm": 0.3322817521202919,
"learning_rate": 1.3555200696822235e-06,
"loss": 0.5073,
"step": 630
},
{
"epoch": 0.65775950668037,
"grad_norm": 0.3496806144821667,
"learning_rate": 1.0556067894097571e-06,
"loss": 0.5242,
"step": 640
},
{
"epoch": 0.6680369989722508,
"grad_norm": 0.30896880735642235,
"learning_rate": 7.919548456213516e-07,
"loss": 0.5155,
"step": 650
},
{
"epoch": 0.6783144912641316,
"grad_norm": 0.3257972583698041,
"learning_rate": 5.652498451916799e-07,
"loss": 0.5158,
"step": 660
},
{
"epoch": 0.6885919835560124,
"grad_norm": 0.3201104743635745,
"learning_rate": 3.760813172726457e-07,
"loss": 0.5142,
"step": 670
},
{
"epoch": 0.6988694758478932,
"grad_norm": 0.34686855669764965,
"learning_rate": 2.2494118026754551e-07,
"loss": 0.5232,
"step": 680
},
{
"epoch": 0.7091469681397738,
"grad_norm": 0.30350285406793465,
"learning_rate": 1.1222246263458469e-07,
"loss": 0.5148,
"step": 690
},
{
"epoch": 0.7194244604316546,
"grad_norm": 0.31292172166688387,
"learning_rate": 3.821828084619727e-08,
"loss": 0.5118,
"step": 700
},
{
"epoch": 0.7297019527235354,
"grad_norm": 0.32011342982016167,
"learning_rate": 3.1210771619027966e-09,
"loss": 0.5009,
"step": 710
},
{
"epoch": 0.7399794450154162,
"grad_norm": 0.35647392118546595,
"learning_rate": 7.022119196808396e-09,
"loss": 0.5148,
"step": 720
},
{
"epoch": 0.750256937307297,
"grad_norm": 0.3191068876583651,
"learning_rate": 4.991126258710177e-08,
"loss": 0.5122,
"step": 730
},
{
"epoch": 0.7605344295991778,
"grad_norm": 0.32588243816148815,
"learning_rate": 1.3167697736969798e-07,
"loss": 0.4926,
"step": 740
},
{
"epoch": 0.7708119218910586,
"grad_norm": 0.3201190610520271,
"learning_rate": 2.5210663800745493e-07,
"loss": 0.5149,
"step": 750
},
{
"epoch": 0.7810894141829393,
"grad_norm": 0.35476449861767284,
"learning_rate": 4.108870763057343e-07,
"loss": 0.5118,
"step": 760
},
{
"epoch": 0.7913669064748201,
"grad_norm": 0.3125164507289312,
"learning_rate": 6.076053957825395e-07,
"loss": 0.5157,
"step": 770
},
{
"epoch": 0.8016443987667009,
"grad_norm": 0.3366477883256346,
"learning_rate": 8.417500453744864e-07,
"loss": 0.5292,
"step": 780
},
{
"epoch": 0.8119218910585817,
"grad_norm": 0.3269719144972129,
"learning_rate": 1.1127121496865256e-06,
"loss": 0.5143,
"step": 790
},
{
"epoch": 0.8221993833504625,
"grad_norm": 0.3126014006138838,
"learning_rate": 1.419787092326219e-06,
"loss": 0.5155,
"step": 800
},
{
"epoch": 0.8324768756423433,
"grad_norm": 0.36354641808871235,
"learning_rate": 1.7621763482051812e-06,
"loss": 0.5044,
"step": 810
},
{
"epoch": 0.8427543679342241,
"grad_norm": 0.36699145399738387,
"learning_rate": 2.138989560043002e-06,
"loss": 0.4822,
"step": 820
},
{
"epoch": 0.8530318602261048,
"grad_norm": 0.33037450883968417,
"learning_rate": 2.549246853673793e-06,
"loss": 0.5087,
"step": 830
},
{
"epoch": 0.8633093525179856,
"grad_norm": 0.3257167825813466,
"learning_rate": 2.991881386134589e-06,
"loss": 0.5192,
"step": 840
},
{
"epoch": 0.8735868448098664,
"grad_norm": 0.3100164686825301,
"learning_rate": 3.465742119909566e-06,
"loss": 0.498,
"step": 850
},
{
"epoch": 0.8838643371017472,
"grad_norm": 0.34697143051842105,
"learning_rate": 3.969596816115712e-06,
"loss": 0.506,
"step": 860
},
{
"epoch": 0.894141829393628,
"grad_norm": 0.33866121968347435,
"learning_rate": 4.502135238846574e-06,
"loss": 0.5293,
"step": 870
},
{
"epoch": 0.9044193216855088,
"grad_norm": 0.36845378999111805,
"learning_rate": 5.061972562341309e-06,
"loss": 0.4909,
"step": 880
},
{
"epoch": 0.9146968139773896,
"grad_norm": 0.387252650542103,
"learning_rate": 5.647652972118995e-06,
"loss": 0.5312,
"step": 890
},
{
"epoch": 0.9249743062692704,
"grad_norm": 0.3647647773617935,
"learning_rate": 6.257653450713748e-06,
"loss": 0.5101,
"step": 900
},
{
"epoch": 0.935251798561151,
"grad_norm": 0.33897356731666334,
"learning_rate": 6.890387738166038e-06,
"loss": 0.5166,
"step": 910
},
{
"epoch": 0.9455292908530318,
"grad_norm": 0.3868822782515743,
"learning_rate": 7.5442104569713904e-06,
"loss": 0.4984,
"step": 920
},
{
"epoch": 0.9558067831449126,
"grad_norm": 0.35781401043924416,
"learning_rate": 8.217421390759708e-06,
"loss": 0.4994,
"step": 930
},
{
"epoch": 0.9660842754367934,
"grad_norm": 0.384761931416951,
"learning_rate": 8.908269905578991e-06,
"loss": 0.5099,
"step": 940
},
{
"epoch": 0.9763617677286742,
"grad_norm": 0.42104665274918485,
"learning_rate": 9.614959502286018e-06,
"loss": 0.5258,
"step": 950
},
{
"epoch": 0.986639260020555,
"grad_norm": 0.4029794401605718,
"learning_rate": 1.033565248820611e-05,
"loss": 0.5263,
"step": 960
},
{
"epoch": 0.9969167523124358,
"grad_norm": 0.39772123505901325,
"learning_rate": 1.106847475591347e-05,
"loss": 0.5244,
"step": 970
},
{
"epoch": 1.0071942446043165,
"grad_norm": 0.4048130684054564,
"learning_rate": 1.1811520656705356e-05,
"loss": 0.4667,
"step": 980
},
{
"epoch": 1.0174717368961974,
"grad_norm": 0.3967838935343299,
"learning_rate": 1.256285795609691e-05,
"loss": 0.4647,
"step": 990
},
{
"epoch": 1.027749229188078,
"grad_norm": 0.43071529156369454,
"learning_rate": 1.3320532858450379e-05,
"loss": 0.4419,
"step": 1000
},
{
"epoch": 1.0380267214799588,
"grad_norm": 0.39440960560899796,
"learning_rate": 1.408257508767236e-05,
"loss": 0.4559,
"step": 1010
},
{
"epoch": 1.0483042137718397,
"grad_norm": 0.3955707807238948,
"learning_rate": 1.4847003010767309e-05,
"loss": 0.468,
"step": 1020
},
{
"epoch": 1.0585817060637204,
"grad_norm": 0.46986854711677106,
"learning_rate": 1.5611828790923776e-05,
"loss": 0.4536,
"step": 1030
},
{
"epoch": 1.0688591983556013,
"grad_norm": 0.4115140300945968,
"learning_rate": 1.6375063556733252e-05,
"loss": 0.4564,
"step": 1040
},
{
"epoch": 1.079136690647482,
"grad_norm": 0.44106204752327893,
"learning_rate": 1.7134722574099274e-05,
"loss": 0.458,
"step": 1050
},
{
"epoch": 1.0894141829393629,
"grad_norm": 0.47583761779999495,
"learning_rate": 1.7888830407387902e-05,
"loss": 0.4429,
"step": 1060
},
{
"epoch": 1.0996916752312436,
"grad_norm": 0.4144253978666447,
"learning_rate": 1.8635426056398183e-05,
"loss": 0.4724,
"step": 1070
},
{
"epoch": 1.1099691675231242,
"grad_norm": 0.4487588852746433,
"learning_rate": 1.9372568055794383e-05,
"loss": 0.4671,
"step": 1080
},
{
"epoch": 1.1202466598150052,
"grad_norm": 0.4360867503615642,
"learning_rate": 2.0098339523739255e-05,
"loss": 0.4515,
"step": 1090
},
{
"epoch": 1.1305241521068858,
"grad_norm": 0.4534710394135783,
"learning_rate": 2.081085314659985e-05,
"loss": 0.4687,
"step": 1100
},
{
"epoch": 1.1408016443987667,
"grad_norm": 0.407838255680345,
"learning_rate": 2.1508256086763382e-05,
"loss": 0.4498,
"step": 1110
},
{
"epoch": 1.1510791366906474,
"grad_norm": 0.4377752649694931,
"learning_rate": 2.218873480080084e-05,
"loss": 0.4605,
"step": 1120
},
{
"epoch": 1.1613566289825283,
"grad_norm": 0.45327786293937533,
"learning_rate": 2.285051975544917e-05,
"loss": 0.4748,
"step": 1130
},
{
"epoch": 1.171634121274409,
"grad_norm": 0.47562670380204264,
"learning_rate": 2.3491890029148105e-05,
"loss": 0.4829,
"step": 1140
},
{
"epoch": 1.1819116135662897,
"grad_norm": 0.43638700959699767,
"learning_rate": 2.4111177787166208e-05,
"loss": 0.4795,
"step": 1150
},
{
"epoch": 1.1921891058581706,
"grad_norm": 0.5211794580103576,
"learning_rate": 2.4706772618678505e-05,
"loss": 0.4762,
"step": 1160
},
{
"epoch": 1.2024665981500513,
"grad_norm": 0.414897294023872,
"learning_rate": 2.5277125724517662e-05,
"loss": 0.4851,
"step": 1170
},
{
"epoch": 1.2127440904419322,
"grad_norm": 0.4010723068335088,
"learning_rate": 2.582075394470868e-05,
"loss": 0.4896,
"step": 1180
},
{
"epoch": 1.223021582733813,
"grad_norm": 0.42016718369332373,
"learning_rate": 2.6336243615313876e-05,
"loss": 0.473,
"step": 1190
},
{
"epoch": 1.2332990750256938,
"grad_norm": 0.43068136492845654,
"learning_rate": 2.682225424455871e-05,
"loss": 0.4673,
"step": 1200
},
{
"epoch": 1.2435765673175745,
"grad_norm": 0.4177444143376056,
"learning_rate": 2.7277521998678908e-05,
"loss": 0.4753,
"step": 1210
},
{
"epoch": 1.2538540596094552,
"grad_norm": 0.40201721497527027,
"learning_rate": 2.7700862988424254e-05,
"loss": 0.4707,
"step": 1220
},
{
"epoch": 1.264131551901336,
"grad_norm": 0.4491628984476387,
"learning_rate": 2.8091176347672836e-05,
"loss": 0.4896,
"step": 1230
},
{
"epoch": 1.274409044193217,
"grad_norm": 0.48516694721767883,
"learning_rate": 2.8447447096149756e-05,
"loss": 0.4675,
"step": 1240
},
{
"epoch": 1.2846865364850977,
"grad_norm": 0.4709039922274958,
"learning_rate": 2.8768748778806387e-05,
"loss": 0.4941,
"step": 1250
},
{
"epoch": 1.2949640287769784,
"grad_norm": 0.4204135412519858,
"learning_rate": 2.9054245874996426e-05,
"loss": 0.4701,
"step": 1260
},
{
"epoch": 1.3052415210688593,
"grad_norm": 0.40339521660066563,
"learning_rate": 2.9303195971183912e-05,
"loss": 0.4869,
"step": 1270
},
{
"epoch": 1.31551901336074,
"grad_norm": 0.4613749457606698,
"learning_rate": 2.951495169153333e-05,
"loss": 0.476,
"step": 1280
},
{
"epoch": 1.3257965056526206,
"grad_norm": 0.403722598944795,
"learning_rate": 2.9688962381361317e-05,
"loss": 0.4754,
"step": 1290
},
{
"epoch": 1.3360739979445015,
"grad_norm": 0.4143629787165823,
"learning_rate": 2.9824775539072402e-05,
"loss": 0.4752,
"step": 1300
},
{
"epoch": 1.3463514902363825,
"grad_norm": 0.3799384001189195,
"learning_rate": 2.992203799285506e-05,
"loss": 0.4872,
"step": 1310
},
{
"epoch": 1.3566289825282631,
"grad_norm": 0.40123402762720445,
"learning_rate": 2.9980496819078232e-05,
"loss": 0.471,
"step": 1320
},
{
"epoch": 1.3669064748201438,
"grad_norm": 0.4269435577016413,
"learning_rate": 3e-05,
"loss": 0.4809,
"step": 1330
},
{
"epoch": 1.3771839671120247,
"grad_norm": 0.4859736842291524,
"learning_rate": 2.9980496819078232e-05,
"loss": 0.4948,
"step": 1340
},
{
"epoch": 1.3874614594039054,
"grad_norm": 0.4696636005599923,
"learning_rate": 2.9922037992855063e-05,
"loss": 0.4767,
"step": 1350
},
{
"epoch": 1.397738951695786,
"grad_norm": 0.4387419339812974,
"learning_rate": 2.9824775539072402e-05,
"loss": 0.489,
"step": 1360
},
{
"epoch": 1.408016443987667,
"grad_norm": 0.3997269476655739,
"learning_rate": 2.9688962381361317e-05,
"loss": 0.476,
"step": 1370
},
{
"epoch": 1.418293936279548,
"grad_norm": 0.456762318598325,
"learning_rate": 2.9514951691533335e-05,
"loss": 0.4859,
"step": 1380
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.3988144971120737,
"learning_rate": 2.930319597118392e-05,
"loss": 0.4811,
"step": 1390
},
{
"epoch": 1.4388489208633093,
"grad_norm": 0.469809339558885,
"learning_rate": 2.905424587499643e-05,
"loss": 0.4576,
"step": 1400
},
{
"epoch": 1.4491264131551902,
"grad_norm": 0.40204250371661987,
"learning_rate": 2.8768748778806387e-05,
"loss": 0.4792,
"step": 1410
},
{
"epoch": 1.4594039054470709,
"grad_norm": 0.43278844680785417,
"learning_rate": 2.8447447096149766e-05,
"loss": 0.471,
"step": 1420
},
{
"epoch": 1.4696813977389516,
"grad_norm": 0.43570757632960133,
"learning_rate": 2.8091176347672846e-05,
"loss": 0.4503,
"step": 1430
},
{
"epoch": 1.4799588900308325,
"grad_norm": 0.3801533344075315,
"learning_rate": 2.7700862988424265e-05,
"loss": 0.4865,
"step": 1440
},
{
"epoch": 1.4902363823227134,
"grad_norm": 0.3636715368816546,
"learning_rate": 2.7277521998678904e-05,
"loss": 0.4729,
"step": 1450
},
{
"epoch": 1.500513874614594,
"grad_norm": 0.4110765263131838,
"learning_rate": 2.682225424455871e-05,
"loss": 0.4738,
"step": 1460
},
{
"epoch": 1.5107913669064748,
"grad_norm": 0.43381733872982453,
"learning_rate": 2.6336243615313872e-05,
"loss": 0.4829,
"step": 1470
},
{
"epoch": 1.5210688591983557,
"grad_norm": 0.40182611171324867,
"learning_rate": 2.5820753944708684e-05,
"loss": 0.4559,
"step": 1480
},
{
"epoch": 1.5313463514902363,
"grad_norm": 0.40535624187153857,
"learning_rate": 2.527712572451767e-05,
"loss": 0.4643,
"step": 1490
},
{
"epoch": 1.541623843782117,
"grad_norm": 0.4115337818793653,
"learning_rate": 2.470677261867851e-05,
"loss": 0.4597,
"step": 1500
},
{
"epoch": 1.551901336073998,
"grad_norm": 0.407199487411736,
"learning_rate": 2.411117778716621e-05,
"loss": 0.4501,
"step": 1510
},
{
"epoch": 1.5621788283658788,
"grad_norm": 0.3743288001977607,
"learning_rate": 2.3491890029148122e-05,
"loss": 0.4744,
"step": 1520
},
{
"epoch": 1.5724563206577595,
"grad_norm": 0.3699307654421427,
"learning_rate": 2.285051975544919e-05,
"loss": 0.4565,
"step": 1530
},
{
"epoch": 1.5827338129496402,
"grad_norm": 0.4028551311302927,
"learning_rate": 2.2188734800800862e-05,
"loss": 0.4556,
"step": 1540
},
{
"epoch": 1.5930113052415211,
"grad_norm": 0.4747323206923733,
"learning_rate": 2.150825608676338e-05,
"loss": 0.4694,
"step": 1550
},
{
"epoch": 1.6032887975334018,
"grad_norm": 0.38235625826277586,
"learning_rate": 2.0810853146599847e-05,
"loss": 0.4722,
"step": 1560
},
{
"epoch": 1.6135662898252825,
"grad_norm": 0.37462427392587805,
"learning_rate": 2.0098339523739252e-05,
"loss": 0.4494,
"step": 1570
},
{
"epoch": 1.6238437821171634,
"grad_norm": 0.4138622513625198,
"learning_rate": 1.9372568055794376e-05,
"loss": 0.4344,
"step": 1580
},
{
"epoch": 1.6341212744090443,
"grad_norm": 0.3402046797002805,
"learning_rate": 1.863542605639818e-05,
"loss": 0.4407,
"step": 1590
},
{
"epoch": 1.644398766700925,
"grad_norm": 0.3414266415166782,
"learning_rate": 1.78888304073879e-05,
"loss": 0.4544,
"step": 1600
},
{
"epoch": 1.6546762589928057,
"grad_norm": 0.390886428434792,
"learning_rate": 1.713472257409928e-05,
"loss": 0.4341,
"step": 1610
},
{
"epoch": 1.6649537512846866,
"grad_norm": 0.40001689741621593,
"learning_rate": 1.6375063556733273e-05,
"loss": 0.4521,
"step": 1620
},
{
"epoch": 1.6752312435765673,
"grad_norm": 0.35804788765594897,
"learning_rate": 1.5611828790923796e-05,
"loss": 0.4387,
"step": 1630
},
{
"epoch": 1.685508735868448,
"grad_norm": 0.36748214820791725,
"learning_rate": 1.4847003010767315e-05,
"loss": 0.4646,
"step": 1640
},
{
"epoch": 1.6957862281603289,
"grad_norm": 0.36341313104520045,
"learning_rate": 1.4082575087672367e-05,
"loss": 0.4415,
"step": 1650
},
{
"epoch": 1.7060637204522098,
"grad_norm": 0.3576653140862902,
"learning_rate": 1.3320532858450387e-05,
"loss": 0.4399,
"step": 1660
},
{
"epoch": 1.7163412127440905,
"grad_norm": 0.36581212655600825,
"learning_rate": 1.2562857956096917e-05,
"loss": 0.4369,
"step": 1670
},
{
"epoch": 1.7266187050359711,
"grad_norm": 0.45119158137057136,
"learning_rate": 1.1811520656705362e-05,
"loss": 0.4248,
"step": 1680
},
{
"epoch": 1.736896197327852,
"grad_norm": 0.3724511924577398,
"learning_rate": 1.1068474755913466e-05,
"loss": 0.4395,
"step": 1690
},
{
"epoch": 1.7471736896197327,
"grad_norm": 0.3751212959461515,
"learning_rate": 1.0335652488206117e-05,
"loss": 0.4201,
"step": 1700
},
{
"epoch": 1.7574511819116134,
"grad_norm": 0.3464962966358675,
"learning_rate": 9.614959502286013e-06,
"loss": 0.4043,
"step": 1710
},
{
"epoch": 1.7677286742034943,
"grad_norm": 0.3663266639181022,
"learning_rate": 8.908269905578998e-06,
"loss": 0.4129,
"step": 1720
},
{
"epoch": 1.7780061664953752,
"grad_norm": 0.35132549079437864,
"learning_rate": 8.217421390759727e-06,
"loss": 0.4164,
"step": 1730
},
{
"epoch": 1.788283658787256,
"grad_norm": 0.3404967583222996,
"learning_rate": 7.544210456971385e-06,
"loss": 0.4265,
"step": 1740
},
{
"epoch": 1.7985611510791366,
"grad_norm": 0.469045408430661,
"learning_rate": 6.890387738166045e-06,
"loss": 0.4104,
"step": 1750
},
{
"epoch": 1.8088386433710175,
"grad_norm": 0.3604653210084856,
"learning_rate": 6.257653450713743e-06,
"loss": 0.4256,
"step": 1760
},
{
"epoch": 1.8191161356628982,
"grad_norm": 0.34250220546967325,
"learning_rate": 5.647652972119002e-06,
"loss": 0.4136,
"step": 1770
},
{
"epoch": 1.829393627954779,
"grad_norm": 0.36630040080056403,
"learning_rate": 5.061972562341319e-06,
"loss": 0.4158,
"step": 1780
},
{
"epoch": 1.8396711202466598,
"grad_norm": 0.41269762392169684,
"learning_rate": 4.502135238846574e-06,
"loss": 0.4169,
"step": 1790
},
{
"epoch": 1.8499486125385407,
"grad_norm": 0.36458609289471444,
"learning_rate": 3.96959681611572e-06,
"loss": 0.3983,
"step": 1800
},
{
"epoch": 1.8602261048304214,
"grad_norm": 0.3384192979617524,
"learning_rate": 3.465742119909566e-06,
"loss": 0.4187,
"step": 1810
},
{
"epoch": 1.870503597122302,
"grad_norm": 0.3558048886988858,
"learning_rate": 2.9918813861345975e-06,
"loss": 0.4191,
"step": 1820
},
{
"epoch": 1.880781089414183,
"grad_norm": 0.35735445996159837,
"learning_rate": 2.5492468536738013e-06,
"loss": 0.3931,
"step": 1830
},
{
"epoch": 1.8910585817060637,
"grad_norm": 0.41549465965742105,
"learning_rate": 2.138989560043004e-06,
"loss": 0.406,
"step": 1840
},
{
"epoch": 1.9013360739979444,
"grad_norm": 0.35888882880957634,
"learning_rate": 1.7621763482051878e-06,
"loss": 0.4213,
"step": 1850
},
{
"epoch": 1.9116135662898253,
"grad_norm": 0.3876466527861625,
"learning_rate": 1.419787092326219e-06,
"loss": 0.4205,
"step": 1860
},
{
"epoch": 1.9218910585817062,
"grad_norm": 0.3916893545346833,
"learning_rate": 1.1127121496865321e-06,
"loss": 0.3946,
"step": 1870
},
{
"epoch": 1.9321685508735869,
"grad_norm": 0.3473782488747357,
"learning_rate": 8.417500453744848e-07,
"loss": 0.4043,
"step": 1880
},
{
"epoch": 1.9424460431654675,
"grad_norm": 0.3347296327648612,
"learning_rate": 6.076053957825395e-07,
"loss": 0.4002,
"step": 1890
},
{
"epoch": 1.9527235354573484,
"grad_norm": 0.3341213934258483,
"learning_rate": 4.1088707630573096e-07,
"loss": 0.4121,
"step": 1900
},
{
"epoch": 1.9630010277492291,
"grad_norm": 0.3441476454719243,
"learning_rate": 2.5210663800745493e-07,
"loss": 0.4047,
"step": 1910
},
{
"epoch": 1.9732785200411098,
"grad_norm": 0.41400797241467135,
"learning_rate": 1.316769773697013e-07,
"loss": 0.3995,
"step": 1920
},
{
"epoch": 1.9835560123329907,
"grad_norm": 0.37953330032711113,
"learning_rate": 4.991126258710344e-08,
"loss": 0.4065,
"step": 1930
},
{
"epoch": 1.9938335046248716,
"grad_norm": 0.3420466537478671,
"learning_rate": 7.022119196808396e-09,
"loss": 0.4018,
"step": 1940
},
{
"epoch": 2.0,
"step": 1946,
"total_flos": 1232698903363584.0,
"train_loss": 0.49459491236726894,
"train_runtime": 32220.0255,
"train_samples_per_second": 1.932,
"train_steps_per_second": 0.06
}
],
"logging_steps": 10,
"max_steps": 1946,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1232698903363584.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}