{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 5445,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.018369690011481057,
"grad_norm": 0.8683544993400574,
"learning_rate": 9.5e-05,
"loss": 5.5925,
"step": 20
},
{
"epoch": 0.03673938002296211,
"grad_norm": 0.7329673171043396,
"learning_rate": 9.970832054037457e-05,
"loss": 5.4275,
"step": 40
},
{
"epoch": 0.05510907003444317,
"grad_norm": 0.658450722694397,
"learning_rate": 9.940128953024256e-05,
"loss": 5.3361,
"step": 60
},
{
"epoch": 0.07347876004592423,
"grad_norm": 0.6886904239654541,
"learning_rate": 9.909425852011053e-05,
"loss": 5.2746,
"step": 80
},
{
"epoch": 0.09184845005740529,
"grad_norm": 0.908559262752533,
"learning_rate": 9.878722750997851e-05,
"loss": 5.178,
"step": 100
},
{
"epoch": 0.11021814006888633,
"grad_norm": 0.613741397857666,
"learning_rate": 9.848019649984648e-05,
"loss": 5.3301,
"step": 120
},
{
"epoch": 0.1285878300803674,
"grad_norm": 0.8728771805763245,
"learning_rate": 9.817316548971447e-05,
"loss": 5.2269,
"step": 140
},
{
"epoch": 0.14695752009184845,
"grad_norm": 0.6336873769760132,
"learning_rate": 9.786613447958243e-05,
"loss": 5.2032,
"step": 160
},
{
"epoch": 0.1653272101033295,
"grad_norm": 0.7317622303962708,
"learning_rate": 9.755910346945042e-05,
"loss": 5.1694,
"step": 180
},
{
"epoch": 0.18369690011481057,
"grad_norm": 0.624251663684845,
"learning_rate": 9.72520724593184e-05,
"loss": 5.15,
"step": 200
},
{
"epoch": 0.2020665901262916,
"grad_norm": 0.6709743142127991,
"learning_rate": 9.694504144918637e-05,
"loss": 5.233,
"step": 220
},
{
"epoch": 0.22043628013777267,
"grad_norm": 0.6651622653007507,
"learning_rate": 9.663801043905434e-05,
"loss": 5.233,
"step": 240
},
{
"epoch": 0.23880597014925373,
"grad_norm": 0.6254720091819763,
"learning_rate": 9.633097942892233e-05,
"loss": 5.2223,
"step": 260
},
{
"epoch": 0.2571756601607348,
"grad_norm": 0.6242057681083679,
"learning_rate": 9.602394841879029e-05,
"loss": 5.1399,
"step": 280
},
{
"epoch": 0.2755453501722158,
"grad_norm": 0.6652698516845703,
"learning_rate": 9.571691740865828e-05,
"loss": 5.2068,
"step": 300
},
{
"epoch": 0.2939150401836969,
"grad_norm": 0.8067322373390198,
"learning_rate": 9.540988639852626e-05,
"loss": 5.1546,
"step": 320
},
{
"epoch": 0.31228473019517794,
"grad_norm": 0.8131216764450073,
"learning_rate": 9.510285538839423e-05,
"loss": 5.2022,
"step": 340
},
{
"epoch": 0.330654420206659,
"grad_norm": 0.7776964902877808,
"learning_rate": 9.47958243782622e-05,
"loss": 5.2079,
"step": 360
},
{
"epoch": 0.34902411021814006,
"grad_norm": 0.7237767577171326,
"learning_rate": 9.448879336813019e-05,
"loss": 5.1812,
"step": 380
},
{
"epoch": 0.36739380022962115,
"grad_norm": 0.8634713292121887,
"learning_rate": 9.418176235799817e-05,
"loss": 5.1857,
"step": 400
},
{
"epoch": 0.3857634902411022,
"grad_norm": 0.6349618434906006,
"learning_rate": 9.387473134786614e-05,
"loss": 5.1357,
"step": 420
},
{
"epoch": 0.4041331802525832,
"grad_norm": 0.6394773721694946,
"learning_rate": 9.356770033773412e-05,
"loss": 5.1004,
"step": 440
},
{
"epoch": 0.4225028702640643,
"grad_norm": 0.7400641441345215,
"learning_rate": 9.326066932760209e-05,
"loss": 5.1123,
"step": 460
},
{
"epoch": 0.44087256027554533,
"grad_norm": 0.6799914836883545,
"learning_rate": 9.295363831747008e-05,
"loss": 5.1645,
"step": 480
},
{
"epoch": 0.4592422502870264,
"grad_norm": 0.7447289824485779,
"learning_rate": 9.264660730733804e-05,
"loss": 5.0634,
"step": 500
},
{
"epoch": 0.47761194029850745,
"grad_norm": 0.6091820597648621,
"learning_rate": 9.233957629720603e-05,
"loss": 5.1632,
"step": 520
},
{
"epoch": 0.49598163030998854,
"grad_norm": 0.7650216221809387,
"learning_rate": 9.2032545287074e-05,
"loss": 5.1083,
"step": 540
},
{
"epoch": 0.5143513203214696,
"grad_norm": 0.6420841813087463,
"learning_rate": 9.172551427694198e-05,
"loss": 5.1332,
"step": 560
},
{
"epoch": 0.5327210103329506,
"grad_norm": 0.6499956846237183,
"learning_rate": 9.141848326680995e-05,
"loss": 5.1091,
"step": 580
},
{
"epoch": 0.5510907003444316,
"grad_norm": 0.6708125472068787,
"learning_rate": 9.111145225667794e-05,
"loss": 5.1761,
"step": 600
},
{
"epoch": 0.5694603903559128,
"grad_norm": 0.6679044961929321,
"learning_rate": 9.08044212465459e-05,
"loss": 5.1747,
"step": 620
},
{
"epoch": 0.5878300803673938,
"grad_norm": 0.6638289093971252,
"learning_rate": 9.049739023641389e-05,
"loss": 5.2075,
"step": 640
},
{
"epoch": 0.6061997703788748,
"grad_norm": 0.6478453278541565,
"learning_rate": 9.019035922628186e-05,
"loss": 5.0887,
"step": 660
},
{
"epoch": 0.6245694603903559,
"grad_norm": 0.7284302711486816,
"learning_rate": 8.988332821614984e-05,
"loss": 5.031,
"step": 680
},
{
"epoch": 0.642939150401837,
"grad_norm": 0.635795533657074,
"learning_rate": 8.957629720601781e-05,
"loss": 5.1412,
"step": 700
},
{
"epoch": 0.661308840413318,
"grad_norm": 0.6595786213874817,
"learning_rate": 8.92692661958858e-05,
"loss": 5.1112,
"step": 720
},
{
"epoch": 0.6796785304247991,
"grad_norm": 0.8448815941810608,
"learning_rate": 8.896223518575376e-05,
"loss": 5.1198,
"step": 740
},
{
"epoch": 0.6980482204362801,
"grad_norm": 0.7050981521606445,
"learning_rate": 8.865520417562175e-05,
"loss": 5.1194,
"step": 760
},
{
"epoch": 0.7164179104477612,
"grad_norm": 0.618584394454956,
"learning_rate": 8.834817316548972e-05,
"loss": 5.0858,
"step": 780
},
{
"epoch": 0.7347876004592423,
"grad_norm": 0.711953341960907,
"learning_rate": 8.80411421553577e-05,
"loss": 5.0608,
"step": 800
},
{
"epoch": 0.7531572904707233,
"grad_norm": 0.6575554609298706,
"learning_rate": 8.773411114522567e-05,
"loss": 5.122,
"step": 820
},
{
"epoch": 0.7715269804822044,
"grad_norm": 0.6340029835700989,
"learning_rate": 8.742708013509366e-05,
"loss": 5.1082,
"step": 840
},
{
"epoch": 0.7898966704936854,
"grad_norm": 0.6645310521125793,
"learning_rate": 8.712004912496162e-05,
"loss": 5.0621,
"step": 860
},
{
"epoch": 0.8082663605051664,
"grad_norm": 0.7187919020652771,
"learning_rate": 8.681301811482961e-05,
"loss": 5.0188,
"step": 880
},
{
"epoch": 0.8266360505166476,
"grad_norm": 0.591472327709198,
"learning_rate": 8.650598710469758e-05,
"loss": 5.0586,
"step": 900
},
{
"epoch": 0.8450057405281286,
"grad_norm": 0.7377704381942749,
"learning_rate": 8.619895609456556e-05,
"loss": 5.0832,
"step": 920
},
{
"epoch": 0.8633754305396096,
"grad_norm": 0.7042536735534668,
"learning_rate": 8.589192508443353e-05,
"loss": 5.0497,
"step": 940
},
{
"epoch": 0.8817451205510907,
"grad_norm": 0.630856990814209,
"learning_rate": 8.558489407430152e-05,
"loss": 5.1131,
"step": 960
},
{
"epoch": 0.9001148105625718,
"grad_norm": 0.7756277322769165,
"learning_rate": 8.527786306416948e-05,
"loss": 5.1407,
"step": 980
},
{
"epoch": 0.9184845005740528,
"grad_norm": 0.6865280866622925,
"learning_rate": 8.497083205403747e-05,
"loss": 5.0728,
"step": 1000
},
{
"epoch": 0.9368541905855339,
"grad_norm": 0.84028160572052,
"learning_rate": 8.466380104390543e-05,
"loss": 5.0406,
"step": 1020
},
{
"epoch": 0.9552238805970149,
"grad_norm": 0.700905442237854,
"learning_rate": 8.435677003377342e-05,
"loss": 5.0948,
"step": 1040
},
{
"epoch": 0.9735935706084959,
"grad_norm": 0.8270923495292664,
"learning_rate": 8.404973902364139e-05,
"loss": 5.1278,
"step": 1060
},
{
"epoch": 0.9919632606199771,
"grad_norm": 0.7246171236038208,
"learning_rate": 8.374270801350936e-05,
"loss": 5.177,
"step": 1080
},
{
"epoch": 1.0,
"eval_loss": 4.98171854019165,
"eval_runtime": 159.7626,
"eval_samples_per_second": 6.059,
"eval_steps_per_second": 3.029,
"step": 1089
},
{
"epoch": 1.0101033295063147,
"grad_norm": 0.7068125605583191,
"learning_rate": 8.343567700337734e-05,
"loss": 4.9922,
"step": 1100
},
{
"epoch": 1.0284730195177956,
"grad_norm": 0.6738723516464233,
"learning_rate": 8.312864599324533e-05,
"loss": 4.9379,
"step": 1120
},
{
"epoch": 1.0468427095292767,
"grad_norm": 0.7351597547531128,
"learning_rate": 8.282161498311329e-05,
"loss": 4.8823,
"step": 1140
},
{
"epoch": 1.0652123995407576,
"grad_norm": 0.7661564350128174,
"learning_rate": 8.251458397298128e-05,
"loss": 4.8654,
"step": 1160
},
{
"epoch": 1.0835820895522388,
"grad_norm": 0.7068904638290405,
"learning_rate": 8.220755296284925e-05,
"loss": 4.8782,
"step": 1180
},
{
"epoch": 1.10195177956372,
"grad_norm": 0.8419404029846191,
"learning_rate": 8.190052195271722e-05,
"loss": 4.8636,
"step": 1200
},
{
"epoch": 1.1203214695752008,
"grad_norm": 0.9159519076347351,
"learning_rate": 8.15934909425852e-05,
"loss": 4.8858,
"step": 1220
},
{
"epoch": 1.138691159586682,
"grad_norm": 0.743350088596344,
"learning_rate": 8.128645993245319e-05,
"loss": 4.8809,
"step": 1240
},
{
"epoch": 1.157060849598163,
"grad_norm": 0.737196683883667,
"learning_rate": 8.097942892232115e-05,
"loss": 4.8634,
"step": 1260
},
{
"epoch": 1.175430539609644,
"grad_norm": 0.7130833864212036,
"learning_rate": 8.067239791218914e-05,
"loss": 4.9034,
"step": 1280
},
{
"epoch": 1.1938002296211252,
"grad_norm": 0.8160459995269775,
"learning_rate": 8.036536690205711e-05,
"loss": 4.8548,
"step": 1300
},
{
"epoch": 1.2121699196326061,
"grad_norm": 0.6838780641555786,
"learning_rate": 8.005833589192508e-05,
"loss": 4.9454,
"step": 1320
},
{
"epoch": 1.2305396096440873,
"grad_norm": 0.85508131980896,
"learning_rate": 7.975130488179306e-05,
"loss": 4.9991,
"step": 1340
},
{
"epoch": 1.2489092996555684,
"grad_norm": 0.7988117933273315,
"learning_rate": 7.944427387166105e-05,
"loss": 4.8513,
"step": 1360
},
{
"epoch": 1.2672789896670493,
"grad_norm": 0.7242785096168518,
"learning_rate": 7.913724286152902e-05,
"loss": 4.9535,
"step": 1380
},
{
"epoch": 1.2856486796785305,
"grad_norm": 0.8718686699867249,
"learning_rate": 7.8830211851397e-05,
"loss": 4.8674,
"step": 1400
},
{
"epoch": 1.3040183696900116,
"grad_norm": 0.6738030910491943,
"learning_rate": 7.852318084126497e-05,
"loss": 4.9205,
"step": 1420
},
{
"epoch": 1.3223880597014925,
"grad_norm": 0.8262588381767273,
"learning_rate": 7.821614983113295e-05,
"loss": 4.9733,
"step": 1440
},
{
"epoch": 1.3407577497129735,
"grad_norm": 0.807981014251709,
"learning_rate": 7.790911882100092e-05,
"loss": 4.9438,
"step": 1460
},
{
"epoch": 1.3591274397244546,
"grad_norm": 0.7458767890930176,
"learning_rate": 7.760208781086891e-05,
"loss": 4.8945,
"step": 1480
},
{
"epoch": 1.3774971297359357,
"grad_norm": 0.747597873210907,
"learning_rate": 7.729505680073688e-05,
"loss": 4.8947,
"step": 1500
},
{
"epoch": 1.3958668197474169,
"grad_norm": 0.724322497844696,
"learning_rate": 7.698802579060486e-05,
"loss": 4.9132,
"step": 1520
},
{
"epoch": 1.4142365097588978,
"grad_norm": 0.7647684812545776,
"learning_rate": 7.668099478047283e-05,
"loss": 4.9085,
"step": 1540
},
{
"epoch": 1.432606199770379,
"grad_norm": 0.7980235815048218,
"learning_rate": 7.63739637703408e-05,
"loss": 4.8458,
"step": 1560
},
{
"epoch": 1.4509758897818599,
"grad_norm": 0.9437423944473267,
"learning_rate": 7.606693276020879e-05,
"loss": 4.8771,
"step": 1580
},
{
"epoch": 1.469345579793341,
"grad_norm": 0.7015650272369385,
"learning_rate": 7.575990175007675e-05,
"loss": 4.9099,
"step": 1600
},
{
"epoch": 1.4877152698048222,
"grad_norm": 0.8654283285140991,
"learning_rate": 7.545287073994474e-05,
"loss": 4.8848,
"step": 1620
},
{
"epoch": 1.506084959816303,
"grad_norm": 0.8487702012062073,
"learning_rate": 7.514583972981272e-05,
"loss": 4.9274,
"step": 1640
},
{
"epoch": 1.524454649827784,
"grad_norm": 0.894727349281311,
"learning_rate": 7.483880871968069e-05,
"loss": 4.9121,
"step": 1660
},
{
"epoch": 1.5428243398392651,
"grad_norm": 0.7763797640800476,
"learning_rate": 7.453177770954867e-05,
"loss": 4.9296,
"step": 1680
},
{
"epoch": 1.5611940298507463,
"grad_norm": 0.7502116560935974,
"learning_rate": 7.422474669941665e-05,
"loss": 4.9129,
"step": 1700
},
{
"epoch": 1.5795637198622274,
"grad_norm": 0.9286855459213257,
"learning_rate": 7.391771568928461e-05,
"loss": 4.8938,
"step": 1720
},
{
"epoch": 1.5979334098737084,
"grad_norm": 0.8818989396095276,
"learning_rate": 7.36106846791526e-05,
"loss": 4.8487,
"step": 1740
},
{
"epoch": 1.6163030998851893,
"grad_norm": 0.8271352648735046,
"learning_rate": 7.330365366902058e-05,
"loss": 4.9022,
"step": 1760
},
{
"epoch": 1.6346727898966704,
"grad_norm": 0.7982690334320068,
"learning_rate": 7.299662265888855e-05,
"loss": 4.8769,
"step": 1780
},
{
"epoch": 1.6530424799081516,
"grad_norm": 0.8425403833389282,
"learning_rate": 7.268959164875653e-05,
"loss": 4.9173,
"step": 1800
},
{
"epoch": 1.6714121699196327,
"grad_norm": 0.7963587045669556,
"learning_rate": 7.238256063862451e-05,
"loss": 4.849,
"step": 1820
},
{
"epoch": 1.6897818599311136,
"grad_norm": 0.8965281248092651,
"learning_rate": 7.207552962849247e-05,
"loss": 4.9365,
"step": 1840
},
{
"epoch": 1.7081515499425948,
"grad_norm": 0.7666225433349609,
"learning_rate": 7.176849861836046e-05,
"loss": 4.828,
"step": 1860
},
{
"epoch": 1.7265212399540757,
"grad_norm": 0.752511203289032,
"learning_rate": 7.146146760822844e-05,
"loss": 4.8823,
"step": 1880
},
{
"epoch": 1.7448909299655568,
"grad_norm": 0.9949547052383423,
"learning_rate": 7.115443659809641e-05,
"loss": 4.9664,
"step": 1900
},
{
"epoch": 1.763260619977038,
"grad_norm": 0.8455734848976135,
"learning_rate": 7.084740558796439e-05,
"loss": 4.9238,
"step": 1920
},
{
"epoch": 1.781630309988519,
"grad_norm": 0.8804666996002197,
"learning_rate": 7.054037457783237e-05,
"loss": 4.9111,
"step": 1940
},
{
"epoch": 1.8,
"grad_norm": 0.8722001910209656,
"learning_rate": 7.023334356770033e-05,
"loss": 4.8461,
"step": 1960
},
{
"epoch": 1.818369690011481,
"grad_norm": 0.9130426049232483,
"learning_rate": 6.992631255756832e-05,
"loss": 4.8927,
"step": 1980
},
{
"epoch": 1.836739380022962,
"grad_norm": 0.8905944228172302,
"learning_rate": 6.96192815474363e-05,
"loss": 4.9238,
"step": 2000
},
{
"epoch": 1.8551090700344433,
"grad_norm": 0.8010729551315308,
"learning_rate": 6.931225053730427e-05,
"loss": 4.8703,
"step": 2020
},
{
"epoch": 1.8734787600459242,
"grad_norm": 0.7367790937423706,
"learning_rate": 6.900521952717225e-05,
"loss": 4.8673,
"step": 2040
},
{
"epoch": 1.8918484500574053,
"grad_norm": 0.7457343339920044,
"learning_rate": 6.869818851704023e-05,
"loss": 4.8702,
"step": 2060
},
{
"epoch": 1.9102181400688862,
"grad_norm": 0.9334909319877625,
"learning_rate": 6.83911575069082e-05,
"loss": 4.973,
"step": 2080
},
{
"epoch": 1.9285878300803674,
"grad_norm": 0.7645395994186401,
"learning_rate": 6.808412649677618e-05,
"loss": 4.8706,
"step": 2100
},
{
"epoch": 1.9469575200918485,
"grad_norm": 0.8864799737930298,
"learning_rate": 6.777709548664414e-05,
"loss": 4.9114,
"step": 2120
},
{
"epoch": 1.9653272101033297,
"grad_norm": 0.995495617389679,
"learning_rate": 6.747006447651213e-05,
"loss": 4.9208,
"step": 2140
},
{
"epoch": 1.9836969001148106,
"grad_norm": 0.7982215881347656,
"learning_rate": 6.71630334663801e-05,
"loss": 4.8581,
"step": 2160
},
{
"epoch": 2.0,
"eval_loss": 4.952657699584961,
"eval_runtime": 161.3919,
"eval_samples_per_second": 5.998,
"eval_steps_per_second": 2.999,
"step": 2178
},
{
"epoch": 2.001836969001148,
"grad_norm": 0.6282178163528442,
"learning_rate": 6.685600245624808e-05,
"loss": 4.8444,
"step": 2180
},
{
"epoch": 2.0202066590126293,
"grad_norm": 0.894764244556427,
"learning_rate": 6.654897144611605e-05,
"loss": 4.6306,
"step": 2200
},
{
"epoch": 2.03857634902411,
"grad_norm": 0.9108575582504272,
"learning_rate": 6.624194043598404e-05,
"loss": 4.6439,
"step": 2220
},
{
"epoch": 2.056946039035591,
"grad_norm": 0.8883677124977112,
"learning_rate": 6.5934909425852e-05,
"loss": 4.6667,
"step": 2240
},
{
"epoch": 2.0753157290470723,
"grad_norm": 0.8742470741271973,
"learning_rate": 6.562787841571999e-05,
"loss": 4.6403,
"step": 2260
},
{
"epoch": 2.0936854190585534,
"grad_norm": 0.883115291595459,
"learning_rate": 6.532084740558797e-05,
"loss": 4.7058,
"step": 2280
},
{
"epoch": 2.1120551090700346,
"grad_norm": 0.9962688088417053,
"learning_rate": 6.501381639545594e-05,
"loss": 4.6428,
"step": 2300
},
{
"epoch": 2.1304247990815153,
"grad_norm": 0.8967293500900269,
"learning_rate": 6.470678538532391e-05,
"loss": 4.7033,
"step": 2320
},
{
"epoch": 2.1487944890929964,
"grad_norm": 0.8904005289077759,
"learning_rate": 6.43997543751919e-05,
"loss": 4.6883,
"step": 2340
},
{
"epoch": 2.1671641791044776,
"grad_norm": 0.9912065267562866,
"learning_rate": 6.409272336505986e-05,
"loss": 4.6034,
"step": 2360
},
{
"epoch": 2.1855338691159587,
"grad_norm": 0.7966124415397644,
"learning_rate": 6.378569235492785e-05,
"loss": 4.5939,
"step": 2380
},
{
"epoch": 2.20390355912744,
"grad_norm": 0.8692038059234619,
"learning_rate": 6.347866134479583e-05,
"loss": 4.635,
"step": 2400
},
{
"epoch": 2.2222732491389205,
"grad_norm": 0.9523972868919373,
"learning_rate": 6.31716303346638e-05,
"loss": 4.5906,
"step": 2420
},
{
"epoch": 2.2406429391504017,
"grad_norm": 1.0009050369262695,
"learning_rate": 6.286459932453177e-05,
"loss": 4.622,
"step": 2440
},
{
"epoch": 2.259012629161883,
"grad_norm": 0.9459270238876343,
"learning_rate": 6.255756831439976e-05,
"loss": 4.6509,
"step": 2460
},
{
"epoch": 2.277382319173364,
"grad_norm": 0.8043718934059143,
"learning_rate": 6.225053730426774e-05,
"loss": 4.6445,
"step": 2480
},
{
"epoch": 2.295752009184845,
"grad_norm": 0.916283130645752,
"learning_rate": 6.194350629413571e-05,
"loss": 4.6549,
"step": 2500
},
{
"epoch": 2.314121699196326,
"grad_norm": 0.9356532096862793,
"learning_rate": 6.163647528400369e-05,
"loss": 4.6616,
"step": 2520
},
{
"epoch": 2.332491389207807,
"grad_norm": 0.9441368579864502,
"learning_rate": 6.132944427387166e-05,
"loss": 4.6529,
"step": 2540
},
{
"epoch": 2.350861079219288,
"grad_norm": 0.8593648672103882,
"learning_rate": 6.102241326373964e-05,
"loss": 4.6266,
"step": 2560
},
{
"epoch": 2.3692307692307693,
"grad_norm": 1.0274901390075684,
"learning_rate": 6.071538225360762e-05,
"loss": 4.6506,
"step": 2580
},
{
"epoch": 2.3876004592422504,
"grad_norm": 0.8973293900489807,
"learning_rate": 6.040835124347559e-05,
"loss": 4.6335,
"step": 2600
},
{
"epoch": 2.405970149253731,
"grad_norm": 0.7993806600570679,
"learning_rate": 6.010132023334357e-05,
"loss": 4.6048,
"step": 2620
},
{
"epoch": 2.4243398392652122,
"grad_norm": 1.0597416162490845,
"learning_rate": 5.979428922321155e-05,
"loss": 4.6689,
"step": 2640
},
{
"epoch": 2.4427095292766934,
"grad_norm": 0.8965485095977783,
"learning_rate": 5.948725821307952e-05,
"loss": 4.7001,
"step": 2660
},
{
"epoch": 2.4610792192881745,
"grad_norm": 1.2203742265701294,
"learning_rate": 5.91802272029475e-05,
"loss": 4.6136,
"step": 2680
},
{
"epoch": 2.4794489092996557,
"grad_norm": 0.8789367079734802,
"learning_rate": 5.887319619281547e-05,
"loss": 4.6247,
"step": 2700
},
{
"epoch": 2.497818599311137,
"grad_norm": 0.997754693031311,
"learning_rate": 5.856616518268345e-05,
"loss": 4.6653,
"step": 2720
},
{
"epoch": 2.516188289322618,
"grad_norm": 1.009928822517395,
"learning_rate": 5.825913417255143e-05,
"loss": 4.6168,
"step": 2740
},
{
"epoch": 2.5345579793340987,
"grad_norm": 0.919626772403717,
"learning_rate": 5.79521031624194e-05,
"loss": 4.6677,
"step": 2760
},
{
"epoch": 2.55292766934558,
"grad_norm": 0.8129732012748718,
"learning_rate": 5.764507215228738e-05,
"loss": 4.6061,
"step": 2780
},
{
"epoch": 2.571297359357061,
"grad_norm": 0.8821771144866943,
"learning_rate": 5.733804114215536e-05,
"loss": 4.5725,
"step": 2800
},
{
"epoch": 2.5896670493685416,
"grad_norm": 1.150881052017212,
"learning_rate": 5.703101013202333e-05,
"loss": 4.6755,
"step": 2820
},
{
"epoch": 2.6080367393800232,
"grad_norm": 1.1142388582229614,
"learning_rate": 5.672397912189131e-05,
"loss": 4.6339,
"step": 2840
},
{
"epoch": 2.626406429391504,
"grad_norm": 1.0733916759490967,
"learning_rate": 5.641694811175929e-05,
"loss": 4.6091,
"step": 2860
},
{
"epoch": 2.644776119402985,
"grad_norm": 0.9986577033996582,
"learning_rate": 5.610991710162726e-05,
"loss": 4.6656,
"step": 2880
},
{
"epoch": 2.663145809414466,
"grad_norm": 1.7999745607376099,
"learning_rate": 5.580288609149524e-05,
"loss": 4.7289,
"step": 2900
},
{
"epoch": 2.681515499425947,
"grad_norm": 0.8412841558456421,
"learning_rate": 5.549585508136322e-05,
"loss": 4.6984,
"step": 2920
},
{
"epoch": 2.6998851894374285,
"grad_norm": 0.8901891708374023,
"learning_rate": 5.5188824071231197e-05,
"loss": 4.6874,
"step": 2940
},
{
"epoch": 2.718254879448909,
"grad_norm": 0.946198046207428,
"learning_rate": 5.488179306109917e-05,
"loss": 4.669,
"step": 2960
},
{
"epoch": 2.7366245694603903,
"grad_norm": 0.9087753891944885,
"learning_rate": 5.457476205096715e-05,
"loss": 4.6541,
"step": 2980
},
{
"epoch": 2.7549942594718715,
"grad_norm": 1.0043200254440308,
"learning_rate": 5.426773104083513e-05,
"loss": 4.6556,
"step": 3000
},
{
"epoch": 2.7733639494833526,
"grad_norm": 0.968223512172699,
"learning_rate": 5.39607000307031e-05,
"loss": 4.7655,
"step": 3020
},
{
"epoch": 2.7917336394948338,
"grad_norm": 1.2286486625671387,
"learning_rate": 5.365366902057108e-05,
"loss": 4.6863,
"step": 3040
},
{
"epoch": 2.8101033295063145,
"grad_norm": 1.153100609779358,
"learning_rate": 5.334663801043906e-05,
"loss": 4.6555,
"step": 3060
},
{
"epoch": 2.8284730195177956,
"grad_norm": 1.01992666721344,
"learning_rate": 5.303960700030703e-05,
"loss": 4.6923,
"step": 3080
},
{
"epoch": 2.8468427095292768,
"grad_norm": 1.1643304824829102,
"learning_rate": 5.273257599017501e-05,
"loss": 4.6761,
"step": 3100
},
{
"epoch": 2.865212399540758,
"grad_norm": 0.9204754829406738,
"learning_rate": 5.242554498004299e-05,
"loss": 4.6726,
"step": 3120
},
{
"epoch": 2.883582089552239,
"grad_norm": 1.0050371885299683,
"learning_rate": 5.211851396991097e-05,
"loss": 4.7501,
"step": 3140
},
{
"epoch": 2.9019517795637197,
"grad_norm": 0.9284995794296265,
"learning_rate": 5.181148295977894e-05,
"loss": 4.6609,
"step": 3160
},
{
"epoch": 2.920321469575201,
"grad_norm": 0.9417170286178589,
"learning_rate": 5.150445194964692e-05,
"loss": 4.6667,
"step": 3180
},
{
"epoch": 2.938691159586682,
"grad_norm": 1.4755141735076904,
"learning_rate": 5.11974209395149e-05,
"loss": 4.6151,
"step": 3200
},
{
"epoch": 2.957060849598163,
"grad_norm": 0.9110747575759888,
"learning_rate": 5.089038992938287e-05,
"loss": 4.6703,
"step": 3220
},
{
"epoch": 2.9754305396096443,
"grad_norm": 1.295502781867981,
"learning_rate": 5.058335891925085e-05,
"loss": 4.7312,
"step": 3240
},
{
"epoch": 2.993800229621125,
"grad_norm": 1.072573184967041,
"learning_rate": 5.027632790911883e-05,
"loss": 4.6101,
"step": 3260
},
{
"epoch": 3.0,
"eval_loss": 5.012691974639893,
"eval_runtime": 161.4677,
"eval_samples_per_second": 5.995,
"eval_steps_per_second": 2.998,
"step": 3267
},
{
"epoch": 3.011940298507463,
"grad_norm": 0.8994636535644531,
"learning_rate": 4.99692968989868e-05,
"loss": 4.4452,
"step": 3280
},
{
"epoch": 3.030309988518944,
"grad_norm": 1.228932499885559,
"learning_rate": 4.966226588885478e-05,
"loss": 4.4106,
"step": 3300
},
{
"epoch": 3.0486796785304247,
"grad_norm": 1.0340349674224854,
"learning_rate": 4.935523487872276e-05,
"loss": 4.4157,
"step": 3320
},
{
"epoch": 3.067049368541906,
"grad_norm": 1.091071367263794,
"learning_rate": 4.904820386859073e-05,
"loss": 4.3893,
"step": 3340
},
{
"epoch": 3.085419058553387,
"grad_norm": 0.974187433719635,
"learning_rate": 4.874117285845871e-05,
"loss": 4.4317,
"step": 3360
},
{
"epoch": 3.103788748564868,
"grad_norm": 1.13187837600708,
"learning_rate": 4.843414184832668e-05,
"loss": 4.4274,
"step": 3380
},
{
"epoch": 3.1221584385763492,
"grad_norm": 0.9200457334518433,
"learning_rate": 4.812711083819466e-05,
"loss": 4.3789,
"step": 3400
},
{
"epoch": 3.14052812858783,
"grad_norm": 1.0937250852584839,
"learning_rate": 4.782007982806264e-05,
"loss": 4.3659,
"step": 3420
},
{
"epoch": 3.158897818599311,
"grad_norm": 1.0695732831954956,
"learning_rate": 4.751304881793061e-05,
"loss": 4.417,
"step": 3440
},
{
"epoch": 3.177267508610792,
"grad_norm": 1.0328212976455688,
"learning_rate": 4.720601780779859e-05,
"loss": 4.3615,
"step": 3460
},
{
"epoch": 3.1956371986222734,
"grad_norm": 0.9559247493743896,
"learning_rate": 4.689898679766657e-05,
"loss": 4.4404,
"step": 3480
},
{
"epoch": 3.2140068886337545,
"grad_norm": 1.1086647510528564,
"learning_rate": 4.659195578753454e-05,
"loss": 4.3502,
"step": 3500
},
{
"epoch": 3.232376578645235,
"grad_norm": 1.0105242729187012,
"learning_rate": 4.628492477740252e-05,
"loss": 4.3852,
"step": 3520
},
{
"epoch": 3.2507462686567163,
"grad_norm": 1.1470004320144653,
"learning_rate": 4.59778937672705e-05,
"loss": 4.4163,
"step": 3540
},
{
"epoch": 3.2691159586681975,
"grad_norm": 1.0169470310211182,
"learning_rate": 4.567086275713847e-05,
"loss": 4.4186,
"step": 3560
},
{
"epoch": 3.2874856486796786,
"grad_norm": 1.2762373685836792,
"learning_rate": 4.536383174700645e-05,
"loss": 4.4134,
"step": 3580
},
{
"epoch": 3.3058553386911598,
"grad_norm": 1.4351933002471924,
"learning_rate": 4.505680073687443e-05,
"loss": 4.3983,
"step": 3600
},
{
"epoch": 3.3242250287026405,
"grad_norm": 1.1447066068649292,
"learning_rate": 4.47497697267424e-05,
"loss": 4.3949,
"step": 3620
},
{
"epoch": 3.3425947187141216,
"grad_norm": 1.0704272985458374,
"learning_rate": 4.4442738716610376e-05,
"loss": 4.409,
"step": 3640
},
{
"epoch": 3.3609644087256028,
"grad_norm": 1.346151351928711,
"learning_rate": 4.413570770647836e-05,
"loss": 4.4447,
"step": 3660
},
{
"epoch": 3.379334098737084,
"grad_norm": 1.2971051931381226,
"learning_rate": 4.382867669634633e-05,
"loss": 4.4367,
"step": 3680
},
{
"epoch": 3.397703788748565,
"grad_norm": 0.9847575426101685,
"learning_rate": 4.3521645686214306e-05,
"loss": 4.4296,
"step": 3700
},
{
"epoch": 3.4160734787600457,
"grad_norm": 1.247763752937317,
"learning_rate": 4.321461467608229e-05,
"loss": 4.3508,
"step": 3720
},
{
"epoch": 3.434443168771527,
"grad_norm": 1.2263673543930054,
"learning_rate": 4.290758366595026e-05,
"loss": 4.4014,
"step": 3740
},
{
"epoch": 3.452812858783008,
"grad_norm": 0.925714910030365,
"learning_rate": 4.2600552655818236e-05,
"loss": 4.3741,
"step": 3760
},
{
"epoch": 3.471182548794489,
"grad_norm": 1.2875808477401733,
"learning_rate": 4.229352164568622e-05,
"loss": 4.4081,
"step": 3780
},
{
"epoch": 3.4895522388059703,
"grad_norm": 1.1704316139221191,
"learning_rate": 4.198649063555419e-05,
"loss": 4.4046,
"step": 3800
},
{
"epoch": 3.507921928817451,
"grad_norm": 1.2848256826400757,
"learning_rate": 4.1679459625422166e-05,
"loss": 4.4377,
"step": 3820
},
{
"epoch": 3.526291618828932,
"grad_norm": 1.0369383096694946,
"learning_rate": 4.137242861529015e-05,
"loss": 4.4256,
"step": 3840
},
{
"epoch": 3.5446613088404133,
"grad_norm": 1.3157178163528442,
"learning_rate": 4.106539760515812e-05,
"loss": 4.4593,
"step": 3860
},
{
"epoch": 3.5630309988518944,
"grad_norm": 1.056178331375122,
"learning_rate": 4.0758366595026096e-05,
"loss": 4.3933,
"step": 3880
},
{
"epoch": 3.5814006888633756,
"grad_norm": 1.218313217163086,
"learning_rate": 4.045133558489408e-05,
"loss": 4.3811,
"step": 3900
},
{
"epoch": 3.5997703788748563,
"grad_norm": 1.447138786315918,
"learning_rate": 4.014430457476205e-05,
"loss": 4.3999,
"step": 3920
},
{
"epoch": 3.6181400688863374,
"grad_norm": 1.0364395380020142,
"learning_rate": 3.9837273564630026e-05,
"loss": 4.3874,
"step": 3940
},
{
"epoch": 3.6365097588978186,
"grad_norm": 1.1889121532440186,
"learning_rate": 3.9530242554498e-05,
"loss": 4.4304,
"step": 3960
},
{
"epoch": 3.6548794489092997,
"grad_norm": 1.056187629699707,
"learning_rate": 3.922321154436598e-05,
"loss": 4.3609,
"step": 3980
},
{
"epoch": 3.673249138920781,
"grad_norm": 1.1961441040039062,
"learning_rate": 3.8916180534233957e-05,
"loss": 4.4391,
"step": 4000
},
{
"epoch": 3.6916188289322616,
"grad_norm": 1.176295518875122,
"learning_rate": 3.860914952410193e-05,
"loss": 4.478,
"step": 4020
},
{
"epoch": 3.7099885189437427,
"grad_norm": 1.1515350341796875,
"learning_rate": 3.830211851396991e-05,
"loss": 4.3721,
"step": 4040
},
{
"epoch": 3.728358208955224,
"grad_norm": 1.13578200340271,
"learning_rate": 3.7995087503837887e-05,
"loss": 4.4138,
"step": 4060
},
{
"epoch": 3.746727898966705,
"grad_norm": 1.1046961545944214,
"learning_rate": 3.768805649370587e-05,
"loss": 4.4361,
"step": 4080
},
{
"epoch": 3.765097588978186,
"grad_norm": 1.043782353401184,
"learning_rate": 3.738102548357384e-05,
"loss": 4.4268,
"step": 4100
},
{
"epoch": 3.783467278989667,
"grad_norm": 1.1587445735931396,
"learning_rate": 3.707399447344182e-05,
"loss": 4.4048,
"step": 4120
},
{
"epoch": 3.801836969001148,
"grad_norm": 1.2178658246994019,
"learning_rate": 3.67669634633098e-05,
"loss": 4.426,
"step": 4140
},
{
"epoch": 3.820206659012629,
"grad_norm": 1.1141862869262695,
"learning_rate": 3.645993245317777e-05,
"loss": 4.4415,
"step": 4160
},
{
"epoch": 3.8385763490241103,
"grad_norm": 1.062812089920044,
"learning_rate": 3.6152901443045753e-05,
"loss": 4.4025,
"step": 4180
},
{
"epoch": 3.8569460390355914,
"grad_norm": 1.1655863523483276,
"learning_rate": 3.584587043291373e-05,
"loss": 4.4325,
"step": 4200
},
{
"epoch": 3.875315729047072,
"grad_norm": 1.1558030843734741,
"learning_rate": 3.55388394227817e-05,
"loss": 4.4379,
"step": 4220
},
{
"epoch": 3.8936854190585533,
"grad_norm": 1.0342259407043457,
"learning_rate": 3.5231808412649684e-05,
"loss": 4.4413,
"step": 4240
},
{
"epoch": 3.9120551090700344,
"grad_norm": 0.9829803109169006,
"learning_rate": 3.492477740251766e-05,
"loss": 4.4537,
"step": 4260
},
{
"epoch": 3.9304247990815155,
"grad_norm": 1.0746711492538452,
"learning_rate": 3.461774639238563e-05,
"loss": 4.3644,
"step": 4280
},
{
"epoch": 3.9487944890929967,
"grad_norm": 1.125643253326416,
"learning_rate": 3.4310715382253614e-05,
"loss": 4.3606,
"step": 4300
},
{
"epoch": 3.9671641791044774,
"grad_norm": 1.1107122898101807,
"learning_rate": 3.400368437212159e-05,
"loss": 4.3981,
"step": 4320
},
{
"epoch": 3.9855338691159585,
"grad_norm": 1.262173056602478,
"learning_rate": 3.369665336198956e-05,
"loss": 4.4326,
"step": 4340
},
{
"epoch": 4.0,
"eval_loss": 5.145928382873535,
"eval_runtime": 159.4863,
"eval_samples_per_second": 6.069,
"eval_steps_per_second": 3.035,
"step": 4356
},
{
"epoch": 4.003673938002296,
"grad_norm": 0.9681905508041382,
"learning_rate": 3.3389622351857544e-05,
"loss": 4.3781,
"step": 4360
},
{
"epoch": 4.022043628013777,
"grad_norm": 1.497965693473816,
"learning_rate": 3.308259134172552e-05,
"loss": 4.2113,
"step": 4380
},
{
"epoch": 4.040413318025259,
"grad_norm": 0.9628502726554871,
"learning_rate": 3.277556033159349e-05,
"loss": 4.2562,
"step": 4400
},
{
"epoch": 4.058783008036739,
"grad_norm": 1.0885639190673828,
"learning_rate": 3.2468529321461474e-05,
"loss": 4.2,
"step": 4420
},
{
"epoch": 4.07715269804822,
"grad_norm": 0.9081327319145203,
"learning_rate": 3.216149831132945e-05,
"loss": 4.1522,
"step": 4440
},
{
"epoch": 4.095522388059702,
"grad_norm": 0.95115065574646,
"learning_rate": 3.185446730119742e-05,
"loss": 4.2762,
"step": 4460
},
{
"epoch": 4.113892078071182,
"grad_norm": 1.2338329553604126,
"learning_rate": 3.15474362910654e-05,
"loss": 4.2049,
"step": 4480
},
{
"epoch": 4.132261768082664,
"grad_norm": 1.256455421447754,
"learning_rate": 3.124040528093338e-05,
"loss": 4.2357,
"step": 4500
},
{
"epoch": 4.150631458094145,
"grad_norm": 1.296912431716919,
"learning_rate": 3.093337427080135e-05,
"loss": 4.2239,
"step": 4520
},
{
"epoch": 4.169001148105625,
"grad_norm": 1.1014831066131592,
"learning_rate": 3.062634326066933e-05,
"loss": 4.191,
"step": 4540
},
{
"epoch": 4.187370838117107,
"grad_norm": 1.0076754093170166,
"learning_rate": 3.0319312250537308e-05,
"loss": 4.2087,
"step": 4560
},
{
"epoch": 4.205740528128588,
"grad_norm": 0.9794378280639648,
"learning_rate": 3.0012281240405283e-05,
"loss": 4.2736,
"step": 4580
},
{
"epoch": 4.224110218140069,
"grad_norm": 1.0224289894104004,
"learning_rate": 2.9705250230273257e-05,
"loss": 4.2309,
"step": 4600
},
{
"epoch": 4.24247990815155,
"grad_norm": 1.120897889137268,
"learning_rate": 2.939821922014124e-05,
"loss": 4.2337,
"step": 4620
},
{
"epoch": 4.2608495981630305,
"grad_norm": 1.2583802938461304,
"learning_rate": 2.9091188210009213e-05,
"loss": 4.1689,
"step": 4640
},
{
"epoch": 4.279219288174512,
"grad_norm": 1.421479344367981,
"learning_rate": 2.8784157199877187e-05,
"loss": 4.2721,
"step": 4660
},
{
"epoch": 4.297588978185993,
"grad_norm": 1.2407679557800293,
"learning_rate": 2.847712618974517e-05,
"loss": 4.2649,
"step": 4680
},
{
"epoch": 4.315958668197474,
"grad_norm": 1.1917908191680908,
"learning_rate": 2.8170095179613143e-05,
"loss": 4.2253,
"step": 4700
},
{
"epoch": 4.334328358208955,
"grad_norm": 1.1925767660140991,
"learning_rate": 2.7863064169481117e-05,
"loss": 4.2396,
"step": 4720
},
{
"epoch": 4.352698048220436,
"grad_norm": 1.028193712234497,
"learning_rate": 2.75560331593491e-05,
"loss": 4.262,
"step": 4740
},
{
"epoch": 4.371067738231917,
"grad_norm": 1.3707988262176514,
"learning_rate": 2.7249002149217073e-05,
"loss": 4.2115,
"step": 4760
},
{
"epoch": 4.389437428243398,
"grad_norm": 1.086790919303894,
"learning_rate": 2.6941971139085047e-05,
"loss": 4.1938,
"step": 4780
},
{
"epoch": 4.40780711825488,
"grad_norm": 1.0577547550201416,
"learning_rate": 2.6634940128953022e-05,
"loss": 4.2009,
"step": 4800
},
{
"epoch": 4.42617680826636,
"grad_norm": 1.1174390316009521,
"learning_rate": 2.6327909118821003e-05,
"loss": 4.22,
"step": 4820
},
{
"epoch": 4.444546498277841,
"grad_norm": 1.0326007604599,
"learning_rate": 2.6020878108688977e-05,
"loss": 4.2575,
"step": 4840
},
{
"epoch": 4.462916188289323,
"grad_norm": 1.172747254371643,
"learning_rate": 2.5713847098556952e-05,
"loss": 4.1741,
"step": 4860
},
{
"epoch": 4.481285878300803,
"grad_norm": 1.2684015035629272,
"learning_rate": 2.5406816088424933e-05,
"loss": 4.2803,
"step": 4880
},
{
"epoch": 4.499655568312285,
"grad_norm": 1.3229318857192993,
"learning_rate": 2.5099785078292907e-05,
"loss": 4.2587,
"step": 4900
},
{
"epoch": 4.518025258323766,
"grad_norm": 1.0893479585647583,
"learning_rate": 2.4792754068160885e-05,
"loss": 4.1393,
"step": 4920
},
{
"epoch": 4.536394948335246,
"grad_norm": 1.18522310256958,
"learning_rate": 2.4485723058028863e-05,
"loss": 4.2688,
"step": 4940
},
{
"epoch": 4.554764638346728,
"grad_norm": 1.2020633220672607,
"learning_rate": 2.417869204789684e-05,
"loss": 4.1873,
"step": 4960
},
{
"epoch": 4.573134328358209,
"grad_norm": 1.0598649978637695,
"learning_rate": 2.3871661037764815e-05,
"loss": 4.1961,
"step": 4980
},
{
"epoch": 4.59150401836969,
"grad_norm": 1.320818305015564,
"learning_rate": 2.3564630027632793e-05,
"loss": 4.2311,
"step": 5000
},
{
"epoch": 4.609873708381171,
"grad_norm": 1.1840718984603882,
"learning_rate": 2.325759901750077e-05,
"loss": 4.1921,
"step": 5020
},
{
"epoch": 4.628243398392652,
"grad_norm": 1.0860955715179443,
"learning_rate": 2.2950568007368745e-05,
"loss": 4.1796,
"step": 5040
},
{
"epoch": 4.646613088404133,
"grad_norm": 1.1883424520492554,
"learning_rate": 2.2643536997236723e-05,
"loss": 4.2878,
"step": 5060
},
{
"epoch": 4.664982778415614,
"grad_norm": 1.2330527305603027,
"learning_rate": 2.23365059871047e-05,
"loss": 4.2017,
"step": 5080
},
{
"epoch": 4.6833524684270955,
"grad_norm": 1.153104543685913,
"learning_rate": 2.2029474976972675e-05,
"loss": 4.2254,
"step": 5100
},
{
"epoch": 4.701722158438576,
"grad_norm": 1.078261137008667,
"learning_rate": 2.1722443966840653e-05,
"loss": 4.2249,
"step": 5120
},
{
"epoch": 4.720091848450057,
"grad_norm": 1.3064528703689575,
"learning_rate": 2.1415412956708628e-05,
"loss": 4.1816,
"step": 5140
},
{
"epoch": 4.7384615384615385,
"grad_norm": 1.1915974617004395,
"learning_rate": 2.1108381946576606e-05,
"loss": 4.1787,
"step": 5160
},
{
"epoch": 4.756831228473019,
"grad_norm": 1.1211307048797607,
"learning_rate": 2.0801350936444583e-05,
"loss": 4.2094,
"step": 5180
},
{
"epoch": 4.775200918484501,
"grad_norm": 1.2985018491744995,
"learning_rate": 2.0494319926312558e-05,
"loss": 4.2306,
"step": 5200
},
{
"epoch": 4.7935706084959815,
"grad_norm": 1.0229589939117432,
"learning_rate": 2.0187288916180536e-05,
"loss": 4.2341,
"step": 5220
},
{
"epoch": 4.811940298507462,
"grad_norm": 1.3006722927093506,
"learning_rate": 1.988025790604851e-05,
"loss": 4.2555,
"step": 5240
},
{
"epoch": 4.830309988518944,
"grad_norm": 1.1146833896636963,
"learning_rate": 1.9573226895916488e-05,
"loss": 4.1993,
"step": 5260
},
{
"epoch": 4.8486796785304245,
"grad_norm": 1.0311157703399658,
"learning_rate": 1.9266195885784466e-05,
"loss": 4.2419,
"step": 5280
},
{
"epoch": 4.867049368541906,
"grad_norm": 1.187200665473938,
"learning_rate": 1.895916487565244e-05,
"loss": 4.1751,
"step": 5300
},
{
"epoch": 4.885419058553387,
"grad_norm": 1.1463189125061035,
"learning_rate": 1.8652133865520418e-05,
"loss": 4.2724,
"step": 5320
},
{
"epoch": 4.903788748564868,
"grad_norm": 1.110522747039795,
"learning_rate": 1.8345102855388396e-05,
"loss": 4.2305,
"step": 5340
},
{
"epoch": 4.922158438576349,
"grad_norm": 1.139212727546692,
"learning_rate": 1.803807184525637e-05,
"loss": 4.1795,
"step": 5360
},
{
"epoch": 4.94052812858783,
"grad_norm": 1.0388927459716797,
"learning_rate": 1.7731040835124348e-05,
"loss": 4.1886,
"step": 5380
},
{
"epoch": 4.958897818599311,
"grad_norm": 1.5335136651992798,
"learning_rate": 1.7424009824992322e-05,
"loss": 4.2074,
"step": 5400
},
{
"epoch": 4.977267508610792,
"grad_norm": 1.2070115804672241,
"learning_rate": 1.71169788148603e-05,
"loss": 4.2166,
"step": 5420
},
{
"epoch": 4.995637198622274,
"grad_norm": 1.0662022829055786,
"learning_rate": 1.6809947804728278e-05,
"loss": 4.2369,
"step": 5440
},
{
"epoch": 5.0,
"eval_loss": 5.327496528625488,
"eval_runtime": 160.2538,
"eval_samples_per_second": 6.04,
"eval_steps_per_second": 3.02,
"step": 5445
}
],
"logging_steps": 20,
"max_steps": 6534,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.192414285567118e+16,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}