{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 9.9906407487401,
"eval_steps": 500,
"global_step": 10410,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.04799616030717543,
"grad_norm": 0.056359845956996786,
"learning_rate": 1.9903938520653218e-05,
"loss": 0.1372,
"step": 50
},
{
"epoch": 0.09599232061435085,
"grad_norm": 0.04530843606021861,
"learning_rate": 1.9807877041306437e-05,
"loss": 0.0026,
"step": 100
},
{
"epoch": 0.14398848092152627,
"grad_norm": 0.048800263548022,
"learning_rate": 1.9711815561959656e-05,
"loss": 0.0016,
"step": 150
},
{
"epoch": 0.1919846412287017,
"grad_norm": 0.028926716536897153,
"learning_rate": 1.9615754082612875e-05,
"loss": 0.0009,
"step": 200
},
{
"epoch": 0.23998080153587714,
"grad_norm": 0.02483660911842089,
"learning_rate": 1.951969260326609e-05,
"loss": 0.0006,
"step": 250
},
{
"epoch": 0.28797696184305255,
"grad_norm": 0.031592169119638684,
"learning_rate": 1.942363112391931e-05,
"loss": 0.0004,
"step": 300
},
{
"epoch": 0.33597312215022795,
"grad_norm": 0.016503212175038827,
"learning_rate": 1.932756964457253e-05,
"loss": 0.0003,
"step": 350
},
{
"epoch": 0.3839692824574034,
"grad_norm": 0.010211019128745821,
"learning_rate": 1.9231508165225746e-05,
"loss": 0.0002,
"step": 400
},
{
"epoch": 0.4319654427645788,
"grad_norm": 0.009707429867388344,
"learning_rate": 1.9135446685878965e-05,
"loss": 0.0002,
"step": 450
},
{
"epoch": 0.4799616030717543,
"grad_norm": 0.009954199033185984,
"learning_rate": 1.903938520653218e-05,
"loss": 0.0001,
"step": 500
},
{
"epoch": 0.5279577633789296,
"grad_norm": 0.03019176505404075,
"learning_rate": 1.89433237271854e-05,
"loss": 0.0001,
"step": 550
},
{
"epoch": 0.5759539236861051,
"grad_norm": 0.017169092945345278,
"learning_rate": 1.884726224783862e-05,
"loss": 0.0002,
"step": 600
},
{
"epoch": 0.6239500839932806,
"grad_norm": 0.014050969174899489,
"learning_rate": 1.875120076849184e-05,
"loss": 0.0002,
"step": 650
},
{
"epoch": 0.6719462443004559,
"grad_norm": 0.01214360409818984,
"learning_rate": 1.8655139289145054e-05,
"loss": 0.0001,
"step": 700
},
{
"epoch": 0.7199424046076314,
"grad_norm": 0.007490998342783297,
"learning_rate": 1.855907780979827e-05,
"loss": 0.0001,
"step": 750
},
{
"epoch": 0.7679385649148068,
"grad_norm": 0.01472168117986472,
"learning_rate": 1.846301633045149e-05,
"loss": 0.0001,
"step": 800
},
{
"epoch": 0.8159347252219823,
"grad_norm": 0.004822963008007148,
"learning_rate": 1.836695485110471e-05,
"loss": 0.0001,
"step": 850
},
{
"epoch": 0.8639308855291576,
"grad_norm": 0.007864662209371043,
"learning_rate": 1.8270893371757928e-05,
"loss": 0.0001,
"step": 900
},
{
"epoch": 0.9119270458363331,
"grad_norm": 0.0037532241969519355,
"learning_rate": 1.8174831892411144e-05,
"loss": 0.0,
"step": 950
},
{
"epoch": 0.9599232061435086,
"grad_norm": 0.025225633083551968,
"learning_rate": 1.8078770413064363e-05,
"loss": 0.0001,
"step": 1000
},
{
"epoch": 1.0076793856491482,
"grad_norm": 0.0070698857815611905,
"learning_rate": 1.7982708933717582e-05,
"loss": 0.0001,
"step": 1050
},
{
"epoch": 1.0556755459563234,
"grad_norm": 0.0077929016224052095,
"learning_rate": 1.78866474543708e-05,
"loss": 0.0001,
"step": 1100
},
{
"epoch": 1.1036717062634989,
"grad_norm": 0.0031971035982334005,
"learning_rate": 1.7790585975024018e-05,
"loss": 0.0,
"step": 1150
},
{
"epoch": 1.1516678665706743,
"grad_norm": 0.0008553018029932224,
"learning_rate": 1.7694524495677234e-05,
"loss": 0.0,
"step": 1200
},
{
"epoch": 1.1996640268778498,
"grad_norm": 0.009629377240548386,
"learning_rate": 1.7598463016330453e-05,
"loss": 0.0,
"step": 1250
},
{
"epoch": 1.2476601871850252,
"grad_norm": 0.015633152594491845,
"learning_rate": 1.7502401536983672e-05,
"loss": 0.0001,
"step": 1300
},
{
"epoch": 1.2956563474922005,
"grad_norm": 0.006061806879093744,
"learning_rate": 1.7406340057636888e-05,
"loss": 0.0001,
"step": 1350
},
{
"epoch": 1.343652507799376,
"grad_norm": 0.014919415942150202,
"learning_rate": 1.7310278578290107e-05,
"loss": 0.0001,
"step": 1400
},
{
"epoch": 1.3916486681065514,
"grad_norm": 0.007840820222323987,
"learning_rate": 1.7214217098943323e-05,
"loss": 0.0001,
"step": 1450
},
{
"epoch": 1.4396448284137269,
"grad_norm": 0.010102899473387241,
"learning_rate": 1.7118155619596542e-05,
"loss": 0.0001,
"step": 1500
},
{
"epoch": 1.4876409887209023,
"grad_norm": 0.00821469211056164,
"learning_rate": 1.702209414024976e-05,
"loss": 0.0001,
"step": 1550
},
{
"epoch": 1.5356371490280778,
"grad_norm": 0.006461620366133209,
"learning_rate": 1.692603266090298e-05,
"loss": 0.0,
"step": 1600
},
{
"epoch": 1.5836333093352533,
"grad_norm": 0.0035233894828334,
"learning_rate": 1.6829971181556197e-05,
"loss": 0.0,
"step": 1650
},
{
"epoch": 1.6316294696424287,
"grad_norm": 0.0013366005919697563,
"learning_rate": 1.6733909702209416e-05,
"loss": 0.0,
"step": 1700
},
{
"epoch": 1.6796256299496042,
"grad_norm": 0.0008247202586656663,
"learning_rate": 1.6637848222862635e-05,
"loss": 0.0,
"step": 1750
},
{
"epoch": 1.7276217902567794,
"grad_norm": 0.00138812261064544,
"learning_rate": 1.654178674351585e-05,
"loss": 0.0,
"step": 1800
},
{
"epoch": 1.7756179505639549,
"grad_norm": 0.0019404034686127678,
"learning_rate": 1.644572526416907e-05,
"loss": 0.0,
"step": 1850
},
{
"epoch": 1.8236141108711303,
"grad_norm": 0.0010990642036531632,
"learning_rate": 1.6349663784822286e-05,
"loss": 0.0,
"step": 1900
},
{
"epoch": 1.8716102711783056,
"grad_norm": 0.0017740362762336732,
"learning_rate": 1.6253602305475506e-05,
"loss": 0.0,
"step": 1950
},
{
"epoch": 1.919606431485481,
"grad_norm": 0.001563612144579101,
"learning_rate": 1.6157540826128725e-05,
"loss": 0.0,
"step": 2000
},
{
"epoch": 1.9676025917926565,
"grad_norm": 0.0018909757453378412,
"learning_rate": 1.606147934678194e-05,
"loss": 0.0,
"step": 2050
},
{
"epoch": 2.0153587712982963,
"grad_norm": 0.0006439448364579109,
"learning_rate": 1.596541786743516e-05,
"loss": 0.0,
"step": 2100
},
{
"epoch": 2.0633549316054713,
"grad_norm": 0.0032895928351192056,
"learning_rate": 1.5869356388088376e-05,
"loss": 0.0,
"step": 2150
},
{
"epoch": 2.111351091912647,
"grad_norm": 0.0027251031542751473,
"learning_rate": 1.5773294908741595e-05,
"loss": 0.0,
"step": 2200
},
{
"epoch": 2.1593472522198223,
"grad_norm": 0.0008593470077862425,
"learning_rate": 1.5677233429394814e-05,
"loss": 0.0,
"step": 2250
},
{
"epoch": 2.2073434125269977,
"grad_norm": 3.458426400791139e-05,
"learning_rate": 1.5581171950048034e-05,
"loss": 0.0,
"step": 2300
},
{
"epoch": 2.255339572834173,
"grad_norm": 0.000981214542420656,
"learning_rate": 1.548511047070125e-05,
"loss": 0.0,
"step": 2350
},
{
"epoch": 2.3033357331413487,
"grad_norm": 0.001124174063701936,
"learning_rate": 1.538904899135447e-05,
"loss": 0.0,
"step": 2400
},
{
"epoch": 2.351331893448524,
"grad_norm": 0.0013095861126641674,
"learning_rate": 1.5292987512007688e-05,
"loss": 0.0,
"step": 2450
},
{
"epoch": 2.3993280537556996,
"grad_norm": 0.0015182053160101795,
"learning_rate": 1.5196926032660904e-05,
"loss": 0.0,
"step": 2500
},
{
"epoch": 2.447324214062875,
"grad_norm": 0.001969005538900658,
"learning_rate": 1.5100864553314123e-05,
"loss": 0.0,
"step": 2550
},
{
"epoch": 2.4953203743700505,
"grad_norm": 2.6145928957516364e-05,
"learning_rate": 1.500480307396734e-05,
"loss": 0.0,
"step": 2600
},
{
"epoch": 2.543316534677226,
"grad_norm": 0.0021796883079272097,
"learning_rate": 1.490874159462056e-05,
"loss": 0.0,
"step": 2650
},
{
"epoch": 2.591312694984401,
"grad_norm": 0.0007661812983237169,
"learning_rate": 1.4812680115273776e-05,
"loss": 0.0,
"step": 2700
},
{
"epoch": 2.639308855291577,
"grad_norm": 0.0003085627432685896,
"learning_rate": 1.4716618635926993e-05,
"loss": 0.0,
"step": 2750
},
{
"epoch": 2.687305015598752,
"grad_norm": 0.00023846879150445877,
"learning_rate": 1.4620557156580213e-05,
"loss": 0.0,
"step": 2800
},
{
"epoch": 2.7353011759059274,
"grad_norm": 0.0012522127595133068,
"learning_rate": 1.452449567723343e-05,
"loss": 0.0,
"step": 2850
},
{
"epoch": 2.783297336213103,
"grad_norm": 0.0008184248138162355,
"learning_rate": 1.442843419788665e-05,
"loss": 0.0,
"step": 2900
},
{
"epoch": 2.8312934965202783,
"grad_norm": 0.01180670902661138,
"learning_rate": 1.4332372718539867e-05,
"loss": 0.0001,
"step": 2950
},
{
"epoch": 2.8792896568274537,
"grad_norm": 0.016217542686608223,
"learning_rate": 1.4236311239193086e-05,
"loss": 0.0002,
"step": 3000
},
{
"epoch": 2.927285817134629,
"grad_norm": 0.010646818079507889,
"learning_rate": 1.4140249759846302e-05,
"loss": 0.0002,
"step": 3050
},
{
"epoch": 2.9752819774418047,
"grad_norm": 0.007196097859297863,
"learning_rate": 1.404418828049952e-05,
"loss": 0.0001,
"step": 3100
},
{
"epoch": 3.023038156947444,
"grad_norm": 0.007568758416020564,
"learning_rate": 1.3948126801152739e-05,
"loss": 0.0001,
"step": 3150
},
{
"epoch": 3.0710343172546195,
"grad_norm": 0.006390579698931919,
"learning_rate": 1.3852065321805957e-05,
"loss": 0.0001,
"step": 3200
},
{
"epoch": 3.119030477561795,
"grad_norm": 0.010734275211685794,
"learning_rate": 1.3756003842459176e-05,
"loss": 0.0,
"step": 3250
},
{
"epoch": 3.1670266378689704,
"grad_norm": 0.004630534766497509,
"learning_rate": 1.3659942363112394e-05,
"loss": 0.0,
"step": 3300
},
{
"epoch": 3.215022798176146,
"grad_norm": 0.0005952342568898498,
"learning_rate": 1.3563880883765613e-05,
"loss": 0.0,
"step": 3350
},
{
"epoch": 3.2630189584833214,
"grad_norm": 0.0027613516647845954,
"learning_rate": 1.3467819404418829e-05,
"loss": 0.0,
"step": 3400
},
{
"epoch": 3.311015118790497,
"grad_norm": 0.0013039145722702529,
"learning_rate": 1.3371757925072046e-05,
"loss": 0.0,
"step": 3450
},
{
"epoch": 3.3590112790976723,
"grad_norm": 0.0009283601577810141,
"learning_rate": 1.3275696445725266e-05,
"loss": 0.0,
"step": 3500
},
{
"epoch": 3.4070074394048477,
"grad_norm": 0.0006436371717338523,
"learning_rate": 1.3179634966378483e-05,
"loss": 0.0,
"step": 3550
},
{
"epoch": 3.455003599712023,
"grad_norm": 6.599953012370843e-05,
"learning_rate": 1.3083573487031702e-05,
"loss": 0.0,
"step": 3600
},
{
"epoch": 3.5029997600191987,
"grad_norm": 6.36973670822645e-05,
"learning_rate": 1.298751200768492e-05,
"loss": 0.0,
"step": 3650
},
{
"epoch": 3.5509959203263737,
"grad_norm": 0.0009200029805350018,
"learning_rate": 1.2891450528338136e-05,
"loss": 0.0,
"step": 3700
},
{
"epoch": 3.5989920806335496,
"grad_norm": 0.001151118667632979,
"learning_rate": 1.2795389048991355e-05,
"loss": 0.0,
"step": 3750
},
{
"epoch": 3.6469882409407246,
"grad_norm": 0.00037121111713653293,
"learning_rate": 1.2699327569644573e-05,
"loss": 0.0,
"step": 3800
},
{
"epoch": 3.6949844012479,
"grad_norm": 0.0021648858121127925,
"learning_rate": 1.2603266090297792e-05,
"loss": 0.0,
"step": 3850
},
{
"epoch": 3.7429805615550755,
"grad_norm": 0.001600258464510773,
"learning_rate": 1.250720461095101e-05,
"loss": 0.0,
"step": 3900
},
{
"epoch": 3.790976721862251,
"grad_norm": 0.0013961604090552233,
"learning_rate": 1.2411143131604229e-05,
"loss": 0.0,
"step": 3950
},
{
"epoch": 3.8389728821694264,
"grad_norm": 0.00048475558523138784,
"learning_rate": 1.2315081652257446e-05,
"loss": 0.0,
"step": 4000
},
{
"epoch": 3.886969042476602,
"grad_norm": 0.0017333329320903196,
"learning_rate": 1.2219020172910662e-05,
"loss": 0.0,
"step": 4050
},
{
"epoch": 3.9349652027837774,
"grad_norm": 0.010707171803579559,
"learning_rate": 1.2122958693563881e-05,
"loss": 0.0001,
"step": 4100
},
{
"epoch": 3.982961363090953,
"grad_norm": 0.006925594566044665,
"learning_rate": 1.2026897214217099e-05,
"loss": 0.0001,
"step": 4150
},
{
"epoch": 4.030717542596593,
"grad_norm": 0.01062801757661266,
"learning_rate": 1.1930835734870318e-05,
"loss": 0.0001,
"step": 4200
},
{
"epoch": 4.078713702903768,
"grad_norm": 2.7264394254701005,
"learning_rate": 1.1834774255523536e-05,
"loss": 0.0425,
"step": 4250
},
{
"epoch": 4.126709863210943,
"grad_norm": 2.656886510026864,
"learning_rate": 1.1738712776176755e-05,
"loss": 0.0154,
"step": 4300
},
{
"epoch": 4.174706023518119,
"grad_norm": 0.007126911273931725,
"learning_rate": 1.1642651296829973e-05,
"loss": 0.0028,
"step": 4350
},
{
"epoch": 4.222702183825294,
"grad_norm": 0.00035477378217495665,
"learning_rate": 1.1546589817483189e-05,
"loss": 0.0,
"step": 4400
},
{
"epoch": 4.2706983441324695,
"grad_norm": 0.0019944039322872796,
"learning_rate": 1.1450528338136408e-05,
"loss": 0.0,
"step": 4450
},
{
"epoch": 4.3186945044396445,
"grad_norm": 0.0027298287000846375,
"learning_rate": 1.1354466858789625e-05,
"loss": 0.0,
"step": 4500
},
{
"epoch": 4.36669066474682,
"grad_norm": 0.003286962475487846,
"learning_rate": 1.1258405379442845e-05,
"loss": 0.0,
"step": 4550
},
{
"epoch": 4.4146868250539955,
"grad_norm": 0.001365637121709503,
"learning_rate": 1.1162343900096062e-05,
"loss": 0.0,
"step": 4600
},
{
"epoch": 4.462682985361171,
"grad_norm": 0.00013215796860760568,
"learning_rate": 1.1066282420749282e-05,
"loss": 0.0,
"step": 4650
},
{
"epoch": 4.510679145668346,
"grad_norm": 0.0018098453195765278,
"learning_rate": 1.0970220941402499e-05,
"loss": 0.0,
"step": 4700
},
{
"epoch": 4.558675305975522,
"grad_norm": 0.003154487431789082,
"learning_rate": 1.0874159462055715e-05,
"loss": 0.0,
"step": 4750
},
{
"epoch": 4.606671466282697,
"grad_norm": 0.0012030613779208958,
"learning_rate": 1.0778097982708934e-05,
"loss": 0.0,
"step": 4800
},
{
"epoch": 4.654667626589873,
"grad_norm": 0.002984602213822321,
"learning_rate": 1.0682036503362152e-05,
"loss": 0.0,
"step": 4850
},
{
"epoch": 4.702663786897048,
"grad_norm": 0.0017107235374378526,
"learning_rate": 1.0585975024015371e-05,
"loss": 0.0,
"step": 4900
},
{
"epoch": 4.750659947204223,
"grad_norm": 0.0013479788753936498,
"learning_rate": 1.0489913544668589e-05,
"loss": 0.0,
"step": 4950
},
{
"epoch": 4.798656107511399,
"grad_norm": 0.0013971374940015167,
"learning_rate": 1.0393852065321808e-05,
"loss": 0.0,
"step": 5000
},
{
"epoch": 4.846652267818574,
"grad_norm": 0.0009174014462086613,
"learning_rate": 1.0297790585975025e-05,
"loss": 0.0,
"step": 5050
},
{
"epoch": 4.89464842812575,
"grad_norm": 0.00021319385906492494,
"learning_rate": 1.0201729106628241e-05,
"loss": 0.0,
"step": 5100
},
{
"epoch": 4.942644588432925,
"grad_norm": 0.0003964928400635538,
"learning_rate": 1.010566762728146e-05,
"loss": 0.0,
"step": 5150
},
{
"epoch": 4.990640748740101,
"grad_norm": 0.0007846693600976504,
"learning_rate": 1.0009606147934678e-05,
"loss": 0.0,
"step": 5200
},
{
"epoch": 5.03839692824574,
"grad_norm": 0.001261614819050779,
"learning_rate": 9.913544668587897e-06,
"loss": 0.0,
"step": 5250
},
{
"epoch": 5.086393088552915,
"grad_norm": 0.0012098325775649194,
"learning_rate": 9.817483189241115e-06,
"loss": 0.0,
"step": 5300
},
{
"epoch": 5.134389248860091,
"grad_norm": 0.0014591579618327688,
"learning_rate": 9.721421709894333e-06,
"loss": 0.0,
"step": 5350
},
{
"epoch": 5.182385409167266,
"grad_norm": 0.0016037268795767395,
"learning_rate": 9.625360230547552e-06,
"loss": 0.0,
"step": 5400
},
{
"epoch": 5.230381569474442,
"grad_norm": 0.0005212598226700765,
"learning_rate": 9.52929875120077e-06,
"loss": 0.0,
"step": 5450
},
{
"epoch": 5.278377729781617,
"grad_norm": 0.0010553934511595387,
"learning_rate": 9.433237271853987e-06,
"loss": 0.0,
"step": 5500
},
{
"epoch": 5.326373890088793,
"grad_norm": 0.00043323667380786225,
"learning_rate": 9.337175792507205e-06,
"loss": 0.0,
"step": 5550
},
{
"epoch": 5.374370050395968,
"grad_norm": 0.0012194286751975318,
"learning_rate": 9.241114313160424e-06,
"loss": 0.0,
"step": 5600
},
{
"epoch": 5.422366210703144,
"grad_norm": 0.000637742719517836,
"learning_rate": 9.145052833813641e-06,
"loss": 0.0,
"step": 5650
},
{
"epoch": 5.470362371010319,
"grad_norm": 4.325815795263492e-05,
"learning_rate": 9.048991354466859e-06,
"loss": 0.0,
"step": 5700
},
{
"epoch": 5.518358531317495,
"grad_norm": 0.0016312939508974279,
"learning_rate": 8.952929875120078e-06,
"loss": 0.0,
"step": 5750
},
{
"epoch": 5.56635469162467,
"grad_norm": 0.0012246180978586572,
"learning_rate": 8.856868395773296e-06,
"loss": 0.0,
"step": 5800
},
{
"epoch": 5.614350851931846,
"grad_norm": 0.0003665920238881759,
"learning_rate": 8.760806916426513e-06,
"loss": 0.0,
"step": 5850
},
{
"epoch": 5.662347012239021,
"grad_norm": 0.0012525412890931756,
"learning_rate": 8.664745437079731e-06,
"loss": 0.0,
"step": 5900
},
{
"epoch": 5.710343172546196,
"grad_norm": 0.0017253082668870557,
"learning_rate": 8.56868395773295e-06,
"loss": 0.0,
"step": 5950
},
{
"epoch": 5.758339332853372,
"grad_norm": 2.167483489099867e-05,
"learning_rate": 8.472622478386168e-06,
"loss": 0.0,
"step": 6000
},
{
"epoch": 5.806335493160547,
"grad_norm": 0.002300377575504565,
"learning_rate": 8.376560999039385e-06,
"loss": 0.0,
"step": 6050
},
{
"epoch": 5.854331653467723,
"grad_norm": 0.001247309342311816,
"learning_rate": 8.280499519692605e-06,
"loss": 0.0,
"step": 6100
},
{
"epoch": 5.902327813774898,
"grad_norm": 0.0018508173770747623,
"learning_rate": 8.184438040345822e-06,
"loss": 0.0,
"step": 6150
},
{
"epoch": 5.950323974082074,
"grad_norm": 2.8314353686097232e-05,
"learning_rate": 8.08837656099904e-06,
"loss": 0.0,
"step": 6200
},
{
"epoch": 5.998320134389249,
"grad_norm": 0.0008053281023898595,
"learning_rate": 7.992315081652257e-06,
"loss": 0.0,
"step": 6250
},
{
"epoch": 6.046076313894888,
"grad_norm": 0.000246511018384193,
"learning_rate": 7.896253602305477e-06,
"loss": 0.0,
"step": 6300
},
{
"epoch": 6.094072474202064,
"grad_norm": 0.001862683974136238,
"learning_rate": 7.800192122958694e-06,
"loss": 0.0,
"step": 6350
},
{
"epoch": 6.142068634509239,
"grad_norm": 0.0010864233898034475,
"learning_rate": 7.704130643611912e-06,
"loss": 0.0,
"step": 6400
},
{
"epoch": 6.190064794816415,
"grad_norm": 0.0011081212998509966,
"learning_rate": 7.60806916426513e-06,
"loss": 0.0,
"step": 6450
},
{
"epoch": 6.23806095512359,
"grad_norm": 0.0004987385497892048,
"learning_rate": 7.512007684918349e-06,
"loss": 0.0,
"step": 6500
},
{
"epoch": 6.286057115430766,
"grad_norm": 0.0006690934909146146,
"learning_rate": 7.415946205571566e-06,
"loss": 0.0,
"step": 6550
},
{
"epoch": 6.334053275737941,
"grad_norm": 0.0005715255066628584,
"learning_rate": 7.319884726224784e-06,
"loss": 0.0,
"step": 6600
},
{
"epoch": 6.382049436045117,
"grad_norm": 0.0002868542568900514,
"learning_rate": 7.223823246878002e-06,
"loss": 0.0,
"step": 6650
},
{
"epoch": 6.430045596352292,
"grad_norm": 0.0012799016530399359,
"learning_rate": 7.127761767531221e-06,
"loss": 0.0,
"step": 6700
},
{
"epoch": 6.478041756659467,
"grad_norm": 0.0006603786706831753,
"learning_rate": 7.031700288184439e-06,
"loss": 0.0,
"step": 6750
},
{
"epoch": 6.526037916966643,
"grad_norm": 0.0018370243979159682,
"learning_rate": 6.935638808837657e-06,
"loss": 0.0,
"step": 6800
},
{
"epoch": 6.574034077273818,
"grad_norm": 0.0005642321026490272,
"learning_rate": 6.839577329490875e-06,
"loss": 0.0,
"step": 6850
},
{
"epoch": 6.622030237580994,
"grad_norm": 0.0009876585977881267,
"learning_rate": 6.743515850144093e-06,
"loss": 0.0,
"step": 6900
},
{
"epoch": 6.670026397888169,
"grad_norm": 1.1312770861231675e-05,
"learning_rate": 6.64745437079731e-06,
"loss": 0.0,
"step": 6950
},
{
"epoch": 6.7180225581953446,
"grad_norm": 0.001873679763596067,
"learning_rate": 6.551392891450529e-06,
"loss": 0.0,
"step": 7000
},
{
"epoch": 6.76601871850252,
"grad_norm": 0.0010621236657186197,
"learning_rate": 6.455331412103747e-06,
"loss": 0.0,
"step": 7050
},
{
"epoch": 6.8140148788096955,
"grad_norm": 0.0005599241094897003,
"learning_rate": 6.359269932756965e-06,
"loss": 0.0,
"step": 7100
},
{
"epoch": 6.8620110391168705,
"grad_norm": 0.0020452728713900425,
"learning_rate": 6.263208453410183e-06,
"loss": 0.0,
"step": 7150
},
{
"epoch": 6.910007199424046,
"grad_norm": 0.0006276694590083033,
"learning_rate": 6.167146974063401e-06,
"loss": 0.0,
"step": 7200
},
{
"epoch": 6.958003359731221,
"grad_norm": 1.0943867005432958e-05,
"learning_rate": 6.071085494716619e-06,
"loss": 0.0,
"step": 7250
},
{
"epoch": 7.005759539236861,
"grad_norm": 0.0003131943367904171,
"learning_rate": 5.9750240153698366e-06,
"loss": 0.0,
"step": 7300
},
{
"epoch": 7.053755699544037,
"grad_norm": 0.0013394420472302832,
"learning_rate": 5.878962536023055e-06,
"loss": 0.0,
"step": 7350
},
{
"epoch": 7.101751859851212,
"grad_norm": 0.0004515914279429353,
"learning_rate": 5.782901056676273e-06,
"loss": 0.0,
"step": 7400
},
{
"epoch": 7.149748020158388,
"grad_norm": 0.0012371776622873765,
"learning_rate": 5.686839577329492e-06,
"loss": 0.0,
"step": 7450
},
{
"epoch": 7.197744180465563,
"grad_norm": 0.0005462388990689112,
"learning_rate": 5.590778097982709e-06,
"loss": 0.0,
"step": 7500
},
{
"epoch": 7.2457403407727385,
"grad_norm": 0.0012197696193749939,
"learning_rate": 5.494716618635928e-06,
"loss": 0.0,
"step": 7550
},
{
"epoch": 7.293736501079914,
"grad_norm": 0.0009604127841121792,
"learning_rate": 5.398655139289145e-06,
"loss": 0.0,
"step": 7600
},
{
"epoch": 7.3417326613870895,
"grad_norm": 0.0025036340230321125,
"learning_rate": 5.302593659942363e-06,
"loss": 0.0,
"step": 7650
},
{
"epoch": 7.3897288216942645,
"grad_norm": 0.0005758966168119034,
"learning_rate": 5.206532180595581e-06,
"loss": 0.0,
"step": 7700
},
{
"epoch": 7.4377249820014395,
"grad_norm": 0.001229882649627972,
"learning_rate": 5.1104707012488e-06,
"loss": 0.0,
"step": 7750
},
{
"epoch": 7.485721142308615,
"grad_norm": 0.002120352213056407,
"learning_rate": 5.014409221902018e-06,
"loss": 0.0,
"step": 7800
},
{
"epoch": 7.53371730261579,
"grad_norm": 0.0005579557634673263,
"learning_rate": 4.918347742555236e-06,
"loss": 0.0,
"step": 7850
},
{
"epoch": 7.581713462922966,
"grad_norm": 0.0006140693671067026,
"learning_rate": 4.822286263208454e-06,
"loss": 0.0,
"step": 7900
},
{
"epoch": 7.629709623230141,
"grad_norm": 0.0020367094428932054,
"learning_rate": 4.726224783861672e-06,
"loss": 0.0,
"step": 7950
},
{
"epoch": 7.677705783537317,
"grad_norm": 0.0010086315815590796,
"learning_rate": 4.630163304514889e-06,
"loss": 0.0,
"step": 8000
},
{
"epoch": 7.725701943844492,
"grad_norm": 0.0023790402017036106,
"learning_rate": 4.534101825168108e-06,
"loss": 0.0,
"step": 8050
},
{
"epoch": 7.773698104151668,
"grad_norm": 0.0006267680798791626,
"learning_rate": 4.438040345821326e-06,
"loss": 0.0,
"step": 8100
},
{
"epoch": 7.821694264458843,
"grad_norm": 0.001438738793669524,
"learning_rate": 4.341978866474544e-06,
"loss": 0.0,
"step": 8150
},
{
"epoch": 7.869690424766019,
"grad_norm": 0.00031777905634207366,
"learning_rate": 4.245917387127762e-06,
"loss": 0.0,
"step": 8200
},
{
"epoch": 7.917686585073194,
"grad_norm": 0.0007442124946473978,
"learning_rate": 4.149855907780981e-06,
"loss": 0.0,
"step": 8250
},
{
"epoch": 7.96568274538037,
"grad_norm": 0.0007645340846509501,
"learning_rate": 4.053794428434198e-06,
"loss": 0.0,
"step": 8300
},
{
"epoch": 8.013438924886009,
"grad_norm": 0.0012016885090594615,
"learning_rate": 3.957732949087416e-06,
"loss": 0.0,
"step": 8350
},
{
"epoch": 8.061435085193185,
"grad_norm": 0.0010325640415925308,
"learning_rate": 3.861671469740634e-06,
"loss": 0.0,
"step": 8400
},
{
"epoch": 8.10943124550036,
"grad_norm": 6.604070979562978e-06,
"learning_rate": 3.7656099903938526e-06,
"loss": 0.0,
"step": 8450
},
{
"epoch": 8.157427405807535,
"grad_norm": 0.0012186936208813802,
"learning_rate": 3.66954851104707e-06,
"loss": 0.0,
"step": 8500
},
{
"epoch": 8.20542356611471,
"grad_norm": 6.490957752970515e-06,
"learning_rate": 3.5734870317002885e-06,
"loss": 0.0,
"step": 8550
},
{
"epoch": 8.253419726421885,
"grad_norm": 6.650211845109218e-06,
"learning_rate": 3.4774255523535065e-06,
"loss": 0.0,
"step": 8600
},
{
"epoch": 8.301415886729062,
"grad_norm": 7.749100946178552e-06,
"learning_rate": 3.381364073006724e-06,
"loss": 0.0,
"step": 8650
},
{
"epoch": 8.349412047036237,
"grad_norm": 0.0012898002261334904,
"learning_rate": 3.2853025936599425e-06,
"loss": 0.0,
"step": 8700
},
{
"epoch": 8.397408207343412,
"grad_norm": 0.0011575881798726088,
"learning_rate": 3.189241114313161e-06,
"loss": 0.0,
"step": 8750
},
{
"epoch": 8.445404367650587,
"grad_norm": 0.0011746103393340125,
"learning_rate": 3.093179634966379e-06,
"loss": 0.0,
"step": 8800
},
{
"epoch": 8.493400527957764,
"grad_norm": 0.0018617005826680572,
"learning_rate": 2.9971181556195965e-06,
"loss": 0.0,
"step": 8850
},
{
"epoch": 8.541396688264939,
"grad_norm": 0.0014304481942881476,
"learning_rate": 2.901056676272815e-06,
"loss": 0.0,
"step": 8900
},
{
"epoch": 8.589392848572114,
"grad_norm": 0.0014302646357683322,
"learning_rate": 2.804995196926033e-06,
"loss": 0.0,
"step": 8950
},
{
"epoch": 8.637389008879289,
"grad_norm": 0.0007299503239037535,
"learning_rate": 2.708933717579251e-06,
"loss": 0.0,
"step": 9000
},
{
"epoch": 8.685385169186466,
"grad_norm": 0.0012707768875989157,
"learning_rate": 2.612872238232469e-06,
"loss": 0.0,
"step": 9050
},
{
"epoch": 8.73338132949364,
"grad_norm": 0.0008222158799147773,
"learning_rate": 2.5168107588856873e-06,
"loss": 0.0,
"step": 9100
},
{
"epoch": 8.781377489800816,
"grad_norm": 0.002014998964020666,
"learning_rate": 2.420749279538905e-06,
"loss": 0.0,
"step": 9150
},
{
"epoch": 8.829373650107991,
"grad_norm": 0.0009001429474679398,
"learning_rate": 2.324687800192123e-06,
"loss": 0.0,
"step": 9200
},
{
"epoch": 8.877369810415168,
"grad_norm": 0.0003820911843140154,
"learning_rate": 2.2286263208453413e-06,
"loss": 0.0,
"step": 9250
},
{
"epoch": 8.925365970722343,
"grad_norm": 0.000856916441308612,
"learning_rate": 2.1325648414985593e-06,
"loss": 0.0,
"step": 9300
},
{
"epoch": 8.973362131029518,
"grad_norm": 0.0007761779486122437,
"learning_rate": 2.0365033621517773e-06,
"loss": 0.0,
"step": 9350
},
{
"epoch": 9.021118310535158,
"grad_norm": 0.0018792757159981181,
"learning_rate": 1.9404418828049953e-06,
"loss": 0.0,
"step": 9400
},
{
"epoch": 9.069114470842333,
"grad_norm": 0.00041481994448875394,
"learning_rate": 1.8443804034582133e-06,
"loss": 0.0,
"step": 9450
},
{
"epoch": 9.117110631149508,
"grad_norm": 1.1819367471960496e-05,
"learning_rate": 1.7483189241114315e-06,
"loss": 0.0,
"step": 9500
},
{
"epoch": 9.165106791456683,
"grad_norm": 0.0017368795816472195,
"learning_rate": 1.6522574447646495e-06,
"loss": 0.0,
"step": 9550
},
{
"epoch": 9.213102951763858,
"grad_norm": 0.0005401116249332524,
"learning_rate": 1.5561959654178677e-06,
"loss": 0.0,
"step": 9600
},
{
"epoch": 9.261099112071035,
"grad_norm": 0.0010785311265829687,
"learning_rate": 1.4601344860710855e-06,
"loss": 0.0,
"step": 9650
},
{
"epoch": 9.30909527237821,
"grad_norm": 0.0004670023483024189,
"learning_rate": 1.3640730067243035e-06,
"loss": 0.0,
"step": 9700
},
{
"epoch": 9.357091432685385,
"grad_norm": 0.0006055103030654768,
"learning_rate": 1.2680115273775217e-06,
"loss": 0.0,
"step": 9750
},
{
"epoch": 9.40508759299256,
"grad_norm": 0.0007597006922075478,
"learning_rate": 1.17195004803074e-06,
"loss": 0.0,
"step": 9800
},
{
"epoch": 9.453083753299737,
"grad_norm": 0.00024368041193667566,
"learning_rate": 1.0758885686839577e-06,
"loss": 0.0,
"step": 9850
},
{
"epoch": 9.501079913606912,
"grad_norm": 0.0016168416769906855,
"learning_rate": 9.79827089337176e-07,
"loss": 0.0,
"step": 9900
},
{
"epoch": 9.549076073914087,
"grad_norm": 0.0014595362051413555,
"learning_rate": 8.837656099903939e-07,
"loss": 0.0,
"step": 9950
},
{
"epoch": 9.597072234221262,
"grad_norm": 0.0019860720699641865,
"learning_rate": 7.87704130643612e-07,
"loss": 0.0,
"step": 10000
},
{
"epoch": 9.645068394528437,
"grad_norm": 0.0010079036592915303,
"learning_rate": 6.916426512968301e-07,
"loss": 0.0,
"step": 10050
},
{
"epoch": 9.693064554835614,
"grad_norm": 0.0018929108249355604,
"learning_rate": 5.955811719500481e-07,
"loss": 0.0,
"step": 10100
},
{
"epoch": 9.741060715142789,
"grad_norm": 0.0014581536648741872,
"learning_rate": 4.995196926032661e-07,
"loss": 0.0,
"step": 10150
},
{
"epoch": 9.789056875449964,
"grad_norm": 0.00042115371443740654,
"learning_rate": 4.034582132564842e-07,
"loss": 0.0,
"step": 10200
},
{
"epoch": 9.837053035757139,
"grad_norm": 0.0011505748269741243,
"learning_rate": 3.0739673390970224e-07,
"loss": 0.0,
"step": 10250
},
{
"epoch": 9.885049196064315,
"grad_norm": 0.0008081086815167396,
"learning_rate": 2.1133525456292026e-07,
"loss": 0.0,
"step": 10300
},
{
"epoch": 9.93304535637149,
"grad_norm": 0.0013881162963304977,
"learning_rate": 1.1527377521613833e-07,
"loss": 0.0,
"step": 10350
},
{
"epoch": 9.981041516678665,
"grad_norm": 0.0012547337111623964,
"learning_rate": 1.921229586935639e-08,
"loss": 0.0,
"step": 10400
}
],
"logging_steps": 50,
"max_steps": 10410,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.497989908103168e+16,
"train_batch_size": 6,
"trial_name": null,
"trial_params": null
}