{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 9.9906407487401,
"eval_steps": 500,
"global_step": 10410,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.04799616030717543,
"grad_norm": 0.40130703571807497,
"learning_rate": 1.9903938520653218e-05,
"loss": 0.5764,
"step": 50
},
{
"epoch": 0.09599232061435085,
"grad_norm": 0.11324276076184485,
"learning_rate": 1.9807877041306437e-05,
"loss": 0.0127,
"step": 100
},
{
"epoch": 0.14398848092152627,
"grad_norm": 0.23456596059371265,
"learning_rate": 1.9711815561959656e-05,
"loss": 0.0116,
"step": 150
},
{
"epoch": 0.1919846412287017,
"grad_norm": 0.2569857313493957,
"learning_rate": 1.9615754082612875e-05,
"loss": 0.0095,
"step": 200
},
{
"epoch": 0.23998080153587714,
"grad_norm": 0.10167221450624435,
"learning_rate": 1.951969260326609e-05,
"loss": 0.0076,
"step": 250
},
{
"epoch": 0.28797696184305255,
"grad_norm": 0.13709005203244679,
"learning_rate": 1.942363112391931e-05,
"loss": 0.0367,
"step": 300
},
{
"epoch": 0.33597312215022795,
"grad_norm": 0.13456998050952693,
"learning_rate": 1.932756964457253e-05,
"loss": 0.1598,
"step": 350
},
{
"epoch": 0.3839692824574034,
"grad_norm": 0.08226295462642544,
"learning_rate": 1.9231508165225746e-05,
"loss": 0.0027,
"step": 400
},
{
"epoch": 0.4319654427645788,
"grad_norm": 0.06582240959495433,
"learning_rate": 1.9135446685878965e-05,
"loss": 0.0018,
"step": 450
},
{
"epoch": 0.4799616030717543,
"grad_norm": 0.04844821194896289,
"learning_rate": 1.903938520653218e-05,
"loss": 0.0011,
"step": 500
},
{
"epoch": 0.5279577633789296,
"grad_norm": 0.044325792418002434,
"learning_rate": 1.89433237271854e-05,
"loss": 0.0009,
"step": 550
},
{
"epoch": 0.5759539236861051,
"grad_norm": 0.10193526439960446,
"learning_rate": 1.884726224783862e-05,
"loss": 0.0007,
"step": 600
},
{
"epoch": 0.6239500839932806,
"grad_norm": 0.04583696584769259,
"learning_rate": 1.875120076849184e-05,
"loss": 0.0004,
"step": 650
},
{
"epoch": 0.6719462443004559,
"grad_norm": 0.06212828060465461,
"learning_rate": 1.8655139289145054e-05,
"loss": 0.0004,
"step": 700
},
{
"epoch": 0.7199424046076314,
"grad_norm": 0.01671985656229097,
"learning_rate": 1.855907780979827e-05,
"loss": 0.0004,
"step": 750
},
{
"epoch": 0.7679385649148068,
"grad_norm": 0.056733991579267375,
"learning_rate": 1.846301633045149e-05,
"loss": 0.0003,
"step": 800
},
{
"epoch": 0.8159347252219823,
"grad_norm": 0.022189341606076773,
"learning_rate": 1.836695485110471e-05,
"loss": 0.0002,
"step": 850
},
{
"epoch": 0.8639308855291576,
"grad_norm": 0.03989407247769456,
"learning_rate": 1.8270893371757928e-05,
"loss": 0.0002,
"step": 900
},
{
"epoch": 0.9119270458363331,
"grad_norm": 0.04976712622552593,
"learning_rate": 1.8174831892411144e-05,
"loss": 0.0004,
"step": 950
},
{
"epoch": 0.9599232061435086,
"grad_norm": 0.03248372661822075,
"learning_rate": 1.8078770413064363e-05,
"loss": 0.0002,
"step": 1000
},
{
"epoch": 1.0076793856491482,
"grad_norm": 0.02828740375926497,
"learning_rate": 1.7982708933717582e-05,
"loss": 0.0001,
"step": 1050
},
{
"epoch": 1.0556755459563234,
"grad_norm": 0.006717843644377119,
"learning_rate": 1.78866474543708e-05,
"loss": 0.0001,
"step": 1100
},
{
"epoch": 1.1036717062634989,
"grad_norm": 0.036848122853791763,
"learning_rate": 1.7790585975024018e-05,
"loss": 0.0002,
"step": 1150
},
{
"epoch": 1.1516678665706743,
"grad_norm": 0.008937747902000345,
"learning_rate": 1.7694524495677234e-05,
"loss": 0.0003,
"step": 1200
},
{
"epoch": 1.1996640268778498,
"grad_norm": 0.020778290871257626,
"learning_rate": 1.7598463016330453e-05,
"loss": 0.0002,
"step": 1250
},
{
"epoch": 1.2476601871850252,
"grad_norm": 0.03489192231362561,
"learning_rate": 1.7502401536983672e-05,
"loss": 0.0002,
"step": 1300
},
{
"epoch": 1.2956563474922005,
"grad_norm": 0.02652070728203031,
"learning_rate": 1.7406340057636888e-05,
"loss": 0.0002,
"step": 1350
},
{
"epoch": 1.343652507799376,
"grad_norm": 0.045892370706542986,
"learning_rate": 1.7310278578290107e-05,
"loss": 0.0001,
"step": 1400
},
{
"epoch": 1.3916486681065514,
"grad_norm": 0.035567397148225946,
"learning_rate": 1.7214217098943323e-05,
"loss": 0.0002,
"step": 1450
},
{
"epoch": 1.4396448284137269,
"grad_norm": 0.058096008885593506,
"learning_rate": 1.7118155619596542e-05,
"loss": 0.0002,
"step": 1500
},
{
"epoch": 1.4876409887209023,
"grad_norm": 0.008041454254388401,
"learning_rate": 1.702209414024976e-05,
"loss": 0.0002,
"step": 1550
},
{
"epoch": 1.5356371490280778,
"grad_norm": 0.0292027637619852,
"learning_rate": 1.692603266090298e-05,
"loss": 0.0002,
"step": 1600
},
{
"epoch": 1.5836333093352533,
"grad_norm": 0.005579591422720289,
"learning_rate": 1.6829971181556197e-05,
"loss": 0.0001,
"step": 1650
},
{
"epoch": 1.6316294696424287,
"grad_norm": 0.009990798873515247,
"learning_rate": 1.6733909702209416e-05,
"loss": 0.0001,
"step": 1700
},
{
"epoch": 1.6796256299496042,
"grad_norm": 0.014982636474279437,
"learning_rate": 1.6637848222862635e-05,
"loss": 0.0001,
"step": 1750
},
{
"epoch": 1.7276217902567794,
"grad_norm": 0.000905839245421785,
"learning_rate": 1.654178674351585e-05,
"loss": 0.0,
"step": 1800
},
{
"epoch": 1.7756179505639549,
"grad_norm": 0.0002544939554230034,
"learning_rate": 1.644572526416907e-05,
"loss": 0.0,
"step": 1850
},
{
"epoch": 1.8236141108711303,
"grad_norm": 0.06486916181278644,
"learning_rate": 1.6349663784822286e-05,
"loss": 0.0,
"step": 1900
},
{
"epoch": 1.8716102711783056,
"grad_norm": 0.0057599671370632665,
"learning_rate": 1.6253602305475506e-05,
"loss": 0.0001,
"step": 1950
},
{
"epoch": 1.919606431485481,
"grad_norm": 0.005752196733099436,
"learning_rate": 1.6157540826128725e-05,
"loss": 0.0001,
"step": 2000
},
{
"epoch": 1.9676025917926565,
"grad_norm": 0.023915848219674633,
"learning_rate": 1.606147934678194e-05,
"loss": 0.0001,
"step": 2050
},
{
"epoch": 2.0153587712982963,
"grad_norm": 0.05763578493168801,
"learning_rate": 1.596541786743516e-05,
"loss": 0.0003,
"step": 2100
},
{
"epoch": 2.0633549316054713,
"grad_norm": 0.027286944195901858,
"learning_rate": 1.5869356388088376e-05,
"loss": 0.0003,
"step": 2150
},
{
"epoch": 2.111351091912647,
"grad_norm": 0.04509506204669888,
"learning_rate": 1.5773294908741595e-05,
"loss": 0.0003,
"step": 2200
},
{
"epoch": 2.1593472522198223,
"grad_norm": 0.004722357132824841,
"learning_rate": 1.5677233429394814e-05,
"loss": 0.0001,
"step": 2250
},
{
"epoch": 2.2073434125269977,
"grad_norm": 0.005947387139866805,
"learning_rate": 1.5581171950048034e-05,
"loss": 0.0001,
"step": 2300
},
{
"epoch": 2.255339572834173,
"grad_norm": 0.005554818550019868,
"learning_rate": 1.548511047070125e-05,
"loss": 0.0001,
"step": 2350
},
{
"epoch": 2.3033357331413487,
"grad_norm": 0.030961713316540428,
"learning_rate": 1.538904899135447e-05,
"loss": 0.0001,
"step": 2400
},
{
"epoch": 2.351331893448524,
"grad_norm": 0.0014604185577770585,
"learning_rate": 1.5292987512007688e-05,
"loss": 0.0001,
"step": 2450
},
{
"epoch": 2.3993280537556996,
"grad_norm": 0.0004144261900010019,
"learning_rate": 1.5196926032660904e-05,
"loss": 0.0001,
"step": 2500
},
{
"epoch": 2.447324214062875,
"grad_norm": 0.002577485157830577,
"learning_rate": 1.5100864553314123e-05,
"loss": 0.0,
"step": 2550
},
{
"epoch": 2.4953203743700505,
"grad_norm": 8.550173364887937e-05,
"learning_rate": 1.500480307396734e-05,
"loss": 0.0,
"step": 2600
},
{
"epoch": 2.543316534677226,
"grad_norm": 0.00010717851623242482,
"learning_rate": 1.490874159462056e-05,
"loss": 0.0,
"step": 2650
},
{
"epoch": 2.591312694984401,
"grad_norm": 6.369950713298052e-05,
"learning_rate": 1.4812680115273776e-05,
"loss": 0.0,
"step": 2700
},
{
"epoch": 2.639308855291577,
"grad_norm": 6.600999957200681e-05,
"learning_rate": 1.4716618635926993e-05,
"loss": 0.0,
"step": 2750
},
{
"epoch": 2.687305015598752,
"grad_norm": 5.603891638556813e-05,
"learning_rate": 1.4620557156580213e-05,
"loss": 0.0,
"step": 2800
},
{
"epoch": 2.7353011759059274,
"grad_norm": 5.2107869217466546e-05,
"learning_rate": 1.452449567723343e-05,
"loss": 0.0,
"step": 2850
},
{
"epoch": 2.783297336213103,
"grad_norm": 4.969774591359737e-05,
"learning_rate": 1.442843419788665e-05,
"loss": 0.0,
"step": 2900
},
{
"epoch": 2.8312934965202783,
"grad_norm": 4.826761656936383e-05,
"learning_rate": 1.4332372718539867e-05,
"loss": 0.0,
"step": 2950
},
{
"epoch": 2.8792896568274537,
"grad_norm": 4.519733338418928e-05,
"learning_rate": 1.4236311239193086e-05,
"loss": 0.0,
"step": 3000
},
{
"epoch": 2.927285817134629,
"grad_norm": 4.741329017367575e-05,
"learning_rate": 1.4140249759846302e-05,
"loss": 0.0,
"step": 3050
},
{
"epoch": 2.9752819774418047,
"grad_norm": 4.210582860046051e-05,
"learning_rate": 1.404418828049952e-05,
"loss": 0.0,
"step": 3100
},
{
"epoch": 3.023038156947444,
"grad_norm": 3.8983925072142975e-05,
"learning_rate": 1.3948126801152739e-05,
"loss": 0.0,
"step": 3150
},
{
"epoch": 3.0710343172546195,
"grad_norm": 3.9396581472833486e-05,
"learning_rate": 1.3852065321805957e-05,
"loss": 0.0,
"step": 3200
},
{
"epoch": 3.119030477561795,
"grad_norm": 3.820381409845616e-05,
"learning_rate": 1.3756003842459176e-05,
"loss": 0.0,
"step": 3250
},
{
"epoch": 3.1670266378689704,
"grad_norm": 3.402938327443867e-05,
"learning_rate": 1.3659942363112394e-05,
"loss": 0.0,
"step": 3300
},
{
"epoch": 3.215022798176146,
"grad_norm": 3.404815140582149e-05,
"learning_rate": 1.3563880883765613e-05,
"loss": 0.0,
"step": 3350
},
{
"epoch": 3.2630189584833214,
"grad_norm": 3.4664587679035554e-05,
"learning_rate": 1.3467819404418829e-05,
"loss": 0.0,
"step": 3400
},
{
"epoch": 3.311015118790497,
"grad_norm": 3.123961826433185e-05,
"learning_rate": 1.3371757925072046e-05,
"loss": 0.0,
"step": 3450
},
{
"epoch": 3.3590112790976723,
"grad_norm": 3.1537633813695165e-05,
"learning_rate": 1.3275696445725266e-05,
"loss": 0.0,
"step": 3500
},
{
"epoch": 3.4070074394048477,
"grad_norm": 3.0829916063519026e-05,
"learning_rate": 1.3179634966378483e-05,
"loss": 0.0,
"step": 3550
},
{
"epoch": 3.455003599712023,
"grad_norm": 3.0331381466507462e-05,
"learning_rate": 1.3083573487031702e-05,
"loss": 0.0,
"step": 3600
},
{
"epoch": 3.5029997600191987,
"grad_norm": 2.862417890171692e-05,
"learning_rate": 1.298751200768492e-05,
"loss": 0.0,
"step": 3650
},
{
"epoch": 3.5509959203263737,
"grad_norm": 2.8983804187072616e-05,
"learning_rate": 1.2891450528338136e-05,
"loss": 0.0,
"step": 3700
},
{
"epoch": 3.5989920806335496,
"grad_norm": 2.7664318469000653e-05,
"learning_rate": 1.2795389048991355e-05,
"loss": 0.0,
"step": 3750
},
{
"epoch": 3.6469882409407246,
"grad_norm": 2.691650921443681e-05,
"learning_rate": 1.2699327569644573e-05,
"loss": 0.0,
"step": 3800
},
{
"epoch": 3.6949844012479,
"grad_norm": 2.6560851276998494e-05,
"learning_rate": 1.2603266090297792e-05,
"loss": 0.0,
"step": 3850
},
{
"epoch": 3.7429805615550755,
"grad_norm": 2.5772439613691444e-05,
"learning_rate": 1.250720461095101e-05,
"loss": 0.0,
"step": 3900
},
{
"epoch": 3.790976721862251,
"grad_norm": 2.5304175412864484e-05,
"learning_rate": 1.2411143131604229e-05,
"loss": 0.0,
"step": 3950
},
{
"epoch": 3.8389728821694264,
"grad_norm": 2.4713440654887632e-05,
"learning_rate": 1.2315081652257446e-05,
"loss": 0.0,
"step": 4000
},
{
"epoch": 3.886969042476602,
"grad_norm": 2.3006175360288813e-05,
"learning_rate": 1.2219020172910662e-05,
"loss": 0.0,
"step": 4050
},
{
"epoch": 3.9349652027837774,
"grad_norm": 2.3716156177079835e-05,
"learning_rate": 1.2122958693563881e-05,
"loss": 0.0,
"step": 4100
},
{
"epoch": 3.982961363090953,
"grad_norm": 2.1533976239454895e-05,
"learning_rate": 1.2026897214217099e-05,
"loss": 0.0,
"step": 4150
},
{
"epoch": 4.030717542596593,
"grad_norm": 2.108107822910467e-05,
"learning_rate": 1.1930835734870318e-05,
"loss": 0.0,
"step": 4200
},
{
"epoch": 4.078713702903768,
"grad_norm": 2.057632380572316e-05,
"learning_rate": 1.1834774255523536e-05,
"loss": 0.0,
"step": 4250
},
{
"epoch": 4.126709863210943,
"grad_norm": 1.9818844799806586e-05,
"learning_rate": 1.1738712776176755e-05,
"loss": 0.0,
"step": 4300
},
{
"epoch": 4.174706023518119,
"grad_norm": 2.000139447813578e-05,
"learning_rate": 1.1642651296829973e-05,
"loss": 0.0,
"step": 4350
},
{
"epoch": 4.222702183825294,
"grad_norm": 1.949942539557493e-05,
"learning_rate": 1.1546589817483189e-05,
"loss": 0.0,
"step": 4400
},
{
"epoch": 4.2706983441324695,
"grad_norm": 1.8867308741731532e-05,
"learning_rate": 1.1450528338136408e-05,
"loss": 0.0,
"step": 4450
},
{
"epoch": 4.3186945044396445,
"grad_norm": 1.9009462306487394e-05,
"learning_rate": 1.1354466858789625e-05,
"loss": 0.0,
"step": 4500
},
{
"epoch": 4.36669066474682,
"grad_norm": 1.8293579829862623e-05,
"learning_rate": 1.1258405379442845e-05,
"loss": 0.0,
"step": 4550
},
{
"epoch": 4.4146868250539955,
"grad_norm": 1.804770861290477e-05,
"learning_rate": 1.1162343900096062e-05,
"loss": 0.0,
"step": 4600
},
{
"epoch": 4.462682985361171,
"grad_norm": 1.7822617536522457e-05,
"learning_rate": 1.1066282420749282e-05,
"loss": 0.0,
"step": 4650
},
{
"epoch": 4.510679145668346,
"grad_norm": 1.7615347035744346e-05,
"learning_rate": 1.0970220941402499e-05,
"loss": 0.0,
"step": 4700
},
{
"epoch": 4.558675305975522,
"grad_norm": 1.6920804109234795e-05,
"learning_rate": 1.0874159462055715e-05,
"loss": 0.0,
"step": 4750
},
{
"epoch": 4.606671466282697,
"grad_norm": 1.668252580792227e-05,
"learning_rate": 1.0778097982708934e-05,
"loss": 0.0,
"step": 4800
},
{
"epoch": 4.654667626589873,
"grad_norm": 1.6864742262294465e-05,
"learning_rate": 1.0682036503362152e-05,
"loss": 0.0,
"step": 4850
},
{
"epoch": 4.702663786897048,
"grad_norm": 1.5542459286485215e-05,
"learning_rate": 1.0585975024015371e-05,
"loss": 0.0,
"step": 4900
},
{
"epoch": 4.750659947204223,
"grad_norm": 1.5404556001067005e-05,
"learning_rate": 1.0489913544668589e-05,
"loss": 0.0,
"step": 4950
},
{
"epoch": 4.798656107511399,
"grad_norm": 1.5272301989357907e-05,
"learning_rate": 1.0393852065321808e-05,
"loss": 0.0,
"step": 5000
},
{
"epoch": 4.846652267818574,
"grad_norm": 1.483108548373125e-05,
"learning_rate": 1.0297790585975025e-05,
"loss": 0.0,
"step": 5050
},
{
"epoch": 4.89464842812575,
"grad_norm": 1.4399855416621308e-05,
"learning_rate": 1.0201729106628241e-05,
"loss": 0.0,
"step": 5100
},
{
"epoch": 4.942644588432925,
"grad_norm": 1.4305520588733118e-05,
"learning_rate": 1.010566762728146e-05,
"loss": 0.0,
"step": 5150
},
{
"epoch": 4.990640748740101,
"grad_norm": 1.4698323816717522e-05,
"learning_rate": 1.0009606147934678e-05,
"loss": 0.0,
"step": 5200
},
{
"epoch": 5.03839692824574,
"grad_norm": 1.4271947775083709e-05,
"learning_rate": 9.913544668587897e-06,
"loss": 0.0,
"step": 5250
},
{
"epoch": 5.086393088552915,
"grad_norm": 1.4132258699606523e-05,
"learning_rate": 9.817483189241115e-06,
"loss": 0.0,
"step": 5300
},
{
"epoch": 5.134389248860091,
"grad_norm": 1.35025310085404e-05,
"learning_rate": 9.721421709894333e-06,
"loss": 0.0,
"step": 5350
},
{
"epoch": 5.182385409167266,
"grad_norm": 1.3373880287433798e-05,
"learning_rate": 9.625360230547552e-06,
"loss": 0.0,
"step": 5400
},
{
"epoch": 5.230381569474442,
"grad_norm": 1.3162993767841352e-05,
"learning_rate": 9.52929875120077e-06,
"loss": 0.0,
"step": 5450
},
{
"epoch": 5.278377729781617,
"grad_norm": 1.3073189483348808e-05,
"learning_rate": 9.433237271853987e-06,
"loss": 0.0,
"step": 5500
},
{
"epoch": 5.326373890088793,
"grad_norm": 1.2930905567300878e-05,
"learning_rate": 9.337175792507205e-06,
"loss": 0.0,
"step": 5550
},
{
"epoch": 5.374370050395968,
"grad_norm": 1.2106566103261723e-05,
"learning_rate": 9.241114313160424e-06,
"loss": 0.0,
"step": 5600
},
{
"epoch": 5.422366210703144,
"grad_norm": 1.1922876686820753e-05,
"learning_rate": 9.145052833813641e-06,
"loss": 0.0,
"step": 5650
},
{
"epoch": 5.470362371010319,
"grad_norm": 1.1860186229397271e-05,
"learning_rate": 9.048991354466859e-06,
"loss": 0.0,
"step": 5700
},
{
"epoch": 5.518358531317495,
"grad_norm": 1.155322467119426e-05,
"learning_rate": 8.952929875120078e-06,
"loss": 0.0,
"step": 5750
},
{
"epoch": 5.56635469162467,
"grad_norm": 1.1614694309542872e-05,
"learning_rate": 8.856868395773296e-06,
"loss": 0.0,
"step": 5800
},
{
"epoch": 5.614350851931846,
"grad_norm": 1.1487452230456672e-05,
"learning_rate": 8.760806916426513e-06,
"loss": 0.0,
"step": 5850
},
{
"epoch": 5.662347012239021,
"grad_norm": 1.1255000679448552e-05,
"learning_rate": 8.664745437079731e-06,
"loss": 0.0,
"step": 5900
},
{
"epoch": 5.710343172546196,
"grad_norm": 1.0868920449345486e-05,
"learning_rate": 8.56868395773295e-06,
"loss": 0.0,
"step": 5950
},
{
"epoch": 5.758339332853372,
"grad_norm": 1.0667112966458112e-05,
"learning_rate": 8.472622478386168e-06,
"loss": 0.0,
"step": 6000
},
{
"epoch": 5.806335493160547,
"grad_norm": 1.0476619800735052e-05,
"learning_rate": 8.376560999039385e-06,
"loss": 0.0,
"step": 6050
},
{
"epoch": 5.854331653467723,
"grad_norm": 1.0834420314369847e-05,
"learning_rate": 8.280499519692605e-06,
"loss": 0.0,
"step": 6100
},
{
"epoch": 5.902327813774898,
"grad_norm": 9.984561593747498e-06,
"learning_rate": 8.184438040345822e-06,
"loss": 0.0,
"step": 6150
},
{
"epoch": 5.950323974082074,
"grad_norm": 9.762043774343246e-06,
"learning_rate": 8.08837656099904e-06,
"loss": 0.0,
"step": 6200
},
{
"epoch": 5.998320134389249,
"grad_norm": 9.685285230895145e-06,
"learning_rate": 7.992315081652257e-06,
"loss": 0.0,
"step": 6250
},
{
"epoch": 6.046076313894888,
"grad_norm": 9.79160031935247e-06,
"learning_rate": 7.896253602305477e-06,
"loss": 0.0,
"step": 6300
},
{
"epoch": 6.094072474202064,
"grad_norm": 9.704886628791248e-06,
"learning_rate": 7.800192122958694e-06,
"loss": 0.0,
"step": 6350
},
{
"epoch": 6.142068634509239,
"grad_norm": 9.481316392333337e-06,
"learning_rate": 7.704130643611912e-06,
"loss": 0.0,
"step": 6400
},
{
"epoch": 6.190064794816415,
"grad_norm": 9.769090140186923e-06,
"learning_rate": 7.60806916426513e-06,
"loss": 0.0,
"step": 6450
},
{
"epoch": 6.23806095512359,
"grad_norm": 9.710405804777812e-06,
"learning_rate": 7.512007684918349e-06,
"loss": 0.0,
"step": 6500
},
{
"epoch": 6.286057115430766,
"grad_norm": 9.551224996992028e-06,
"learning_rate": 7.415946205571566e-06,
"loss": 0.0,
"step": 6550
},
{
"epoch": 6.334053275737941,
"grad_norm": 9.460116633770695e-06,
"learning_rate": 7.319884726224784e-06,
"loss": 0.0,
"step": 6600
},
{
"epoch": 6.382049436045117,
"grad_norm": 9.070127955759897e-06,
"learning_rate": 7.223823246878002e-06,
"loss": 0.0,
"step": 6650
},
{
"epoch": 6.430045596352292,
"grad_norm": 9.25882734332689e-06,
"learning_rate": 7.127761767531221e-06,
"loss": 0.0,
"step": 6700
},
{
"epoch": 6.478041756659467,
"grad_norm": 8.869373100655615e-06,
"learning_rate": 7.031700288184439e-06,
"loss": 0.0,
"step": 6750
},
{
"epoch": 6.526037916966643,
"grad_norm": 8.892900915978797e-06,
"learning_rate": 6.935638808837657e-06,
"loss": 0.0,
"step": 6800
},
{
"epoch": 6.574034077273818,
"grad_norm": 8.526783850924674e-06,
"learning_rate": 6.839577329490875e-06,
"loss": 0.0,
"step": 6850
},
{
"epoch": 6.622030237580994,
"grad_norm": 8.518234581725505e-06,
"learning_rate": 6.743515850144093e-06,
"loss": 0.0,
"step": 6900
},
{
"epoch": 6.670026397888169,
"grad_norm": 8.66332996942434e-06,
"learning_rate": 6.64745437079731e-06,
"loss": 0.0,
"step": 6950
},
{
"epoch": 6.7180225581953446,
"grad_norm": 8.173589389864108e-06,
"learning_rate": 6.551392891450529e-06,
"loss": 0.0,
"step": 7000
},
{
"epoch": 6.76601871850252,
"grad_norm": 8.530415930494493e-06,
"learning_rate": 6.455331412103747e-06,
"loss": 0.0,
"step": 7050
},
{
"epoch": 6.8140148788096955,
"grad_norm": 7.902741810594393e-06,
"learning_rate": 6.359269932756965e-06,
"loss": 0.0,
"step": 7100
},
{
"epoch": 6.8620110391168705,
"grad_norm": 7.521547200339413e-06,
"learning_rate": 6.263208453410183e-06,
"loss": 0.0,
"step": 7150
},
{
"epoch": 6.910007199424046,
"grad_norm": 7.847376674671114e-06,
"learning_rate": 6.167146974063401e-06,
"loss": 0.0,
"step": 7200
},
{
"epoch": 6.958003359731221,
"grad_norm": 7.490326114588764e-06,
"learning_rate": 6.071085494716619e-06,
"loss": 0.0,
"step": 7250
},
{
"epoch": 7.005759539236861,
"grad_norm": 7.361179075861819e-06,
"learning_rate": 5.9750240153698366e-06,
"loss": 0.0,
"step": 7300
},
{
"epoch": 7.053755699544037,
"grad_norm": 7.84799841299752e-06,
"learning_rate": 5.878962536023055e-06,
"loss": 0.0,
"step": 7350
},
{
"epoch": 7.101751859851212,
"grad_norm": 7.3744048610543344e-06,
"learning_rate": 5.782901056676273e-06,
"loss": 0.0,
"step": 7400
},
{
"epoch": 7.149748020158388,
"grad_norm": 7.150316383943483e-06,
"learning_rate": 5.686839577329492e-06,
"loss": 0.0,
"step": 7450
},
{
"epoch": 7.197744180465563,
"grad_norm": 7.097613651477278e-06,
"learning_rate": 5.590778097982709e-06,
"loss": 0.0,
"step": 7500
},
{
"epoch": 7.2457403407727385,
"grad_norm": 6.9607610650455755e-06,
"learning_rate": 5.494716618635928e-06,
"loss": 0.0,
"step": 7550
},
{
"epoch": 7.293736501079914,
"grad_norm": 7.245377322952251e-06,
"learning_rate": 5.398655139289145e-06,
"loss": 0.0,
"step": 7600
},
{
"epoch": 7.3417326613870895,
"grad_norm": 6.88807709852117e-06,
"learning_rate": 5.302593659942363e-06,
"loss": 0.0,
"step": 7650
},
{
"epoch": 7.3897288216942645,
"grad_norm": 7.55357832084693e-06,
"learning_rate": 5.206532180595581e-06,
"loss": 0.0,
"step": 7700
},
{
"epoch": 7.4377249820014395,
"grad_norm": 6.624821604422753e-06,
"learning_rate": 5.1104707012488e-06,
"loss": 0.0,
"step": 7750
},
{
"epoch": 7.485721142308615,
"grad_norm": 6.872790283632819e-06,
"learning_rate": 5.014409221902018e-06,
"loss": 0.0,
"step": 7800
},
{
"epoch": 7.53371730261579,
"grad_norm": 6.766101911675496e-06,
"learning_rate": 4.918347742555236e-06,
"loss": 0.0,
"step": 7850
},
{
"epoch": 7.581713462922966,
"grad_norm": 6.722151874323171e-06,
"learning_rate": 4.822286263208454e-06,
"loss": 0.0,
"step": 7900
},
{
"epoch": 7.629709623230141,
"grad_norm": 7.021263800205049e-06,
"learning_rate": 4.726224783861672e-06,
"loss": 0.0,
"step": 7950
},
{
"epoch": 7.677705783537317,
"grad_norm": 6.744329865139252e-06,
"learning_rate": 4.630163304514889e-06,
"loss": 0.0,
"step": 8000
},
{
"epoch": 7.725701943844492,
"grad_norm": 6.898602134365942e-06,
"learning_rate": 4.534101825168108e-06,
"loss": 0.0,
"step": 8050
},
{
"epoch": 7.773698104151668,
"grad_norm": 6.8276731090086885e-06,
"learning_rate": 4.438040345821326e-06,
"loss": 0.0,
"step": 8100
},
{
"epoch": 7.821694264458843,
"grad_norm": 7.1020299164188845e-06,
"learning_rate": 4.341978866474544e-06,
"loss": 0.0,
"step": 8150
},
{
"epoch": 7.869690424766019,
"grad_norm": 6.9462739547457135e-06,
"learning_rate": 4.245917387127762e-06,
"loss": 0.0,
"step": 8200
},
{
"epoch": 7.917686585073194,
"grad_norm": 6.350102344704245e-06,
"learning_rate": 4.149855907780981e-06,
"loss": 0.0,
"step": 8250
},
{
"epoch": 7.96568274538037,
"grad_norm": 6.390333490876427e-06,
"learning_rate": 4.053794428434198e-06,
"loss": 0.0,
"step": 8300
},
{
"epoch": 8.013438924886009,
"grad_norm": 6.443431604638428e-06,
"learning_rate": 3.957732949087416e-06,
"loss": 0.0,
"step": 8350
},
{
"epoch": 8.061435085193185,
"grad_norm": 6.41708772500132e-06,
"learning_rate": 3.861671469740634e-06,
"loss": 0.0,
"step": 8400
},
{
"epoch": 8.10943124550036,
"grad_norm": 6.6462080679425145e-06,
"learning_rate": 3.7656099903938526e-06,
"loss": 0.0,
"step": 8450
},
{
"epoch": 8.157427405807535,
"grad_norm": 6.24569134849571e-06,
"learning_rate": 3.66954851104707e-06,
"loss": 0.0,
"step": 8500
},
{
"epoch": 8.20542356611471,
"grad_norm": 5.999653116764563e-06,
"learning_rate": 3.5734870317002885e-06,
"loss": 0.0,
"step": 8550
},
{
"epoch": 8.253419726421885,
"grad_norm": 6.155547250028927e-06,
"learning_rate": 3.4774255523535065e-06,
"loss": 0.0,
"step": 8600
},
{
"epoch": 8.301415886729062,
"grad_norm": 5.940571380634373e-06,
"learning_rate": 3.381364073006724e-06,
"loss": 0.0,
"step": 8650
},
{
"epoch": 8.349412047036237,
"grad_norm": 5.540597800755594e-06,
"learning_rate": 3.2853025936599425e-06,
"loss": 0.0,
"step": 8700
},
{
"epoch": 8.397408207343412,
"grad_norm": 5.88317174610012e-06,
"learning_rate": 3.189241114313161e-06,
"loss": 0.0,
"step": 8750
},
{
"epoch": 8.445404367650587,
"grad_norm": 5.7057810585146455e-06,
"learning_rate": 3.093179634966379e-06,
"loss": 0.0,
"step": 8800
},
{
"epoch": 8.493400527957764,
"grad_norm": 5.697939572040236e-06,
"learning_rate": 2.9971181556195965e-06,
"loss": 0.0,
"step": 8850
},
{
"epoch": 8.541396688264939,
"grad_norm": 6.36870672014845e-06,
"learning_rate": 2.901056676272815e-06,
"loss": 0.0,
"step": 8900
},
{
"epoch": 8.589392848572114,
"grad_norm": 5.485021471826195e-06,
"learning_rate": 2.804995196926033e-06,
"loss": 0.0,
"step": 8950
},
{
"epoch": 8.637389008879289,
"grad_norm": 5.65446099589693e-06,
"learning_rate": 2.708933717579251e-06,
"loss": 0.0,
"step": 9000
},
{
"epoch": 8.685385169186466,
"grad_norm": 5.243595495636662e-06,
"learning_rate": 2.612872238232469e-06,
"loss": 0.0,
"step": 9050
},
{
"epoch": 8.73338132949364,
"grad_norm": 5.42304396874509e-06,
"learning_rate": 2.5168107588856873e-06,
"loss": 0.0,
"step": 9100
},
{
"epoch": 8.781377489800816,
"grad_norm": 6.108981365424752e-06,
"learning_rate": 2.420749279538905e-06,
"loss": 0.0,
"step": 9150
},
{
"epoch": 8.829373650107991,
"grad_norm": 5.738890033802202e-06,
"learning_rate": 2.324687800192123e-06,
"loss": 0.0,
"step": 9200
},
{
"epoch": 8.877369810415168,
"grad_norm": 5.360683207802895e-06,
"learning_rate": 2.2286263208453413e-06,
"loss": 0.0,
"step": 9250
},
{
"epoch": 8.925365970722343,
"grad_norm": 5.861133131587372e-06,
"learning_rate": 2.1325648414985593e-06,
"loss": 0.0,
"step": 9300
},
{
"epoch": 8.973362131029518,
"grad_norm": 5.315087272918953e-06,
"learning_rate": 2.0365033621517773e-06,
"loss": 0.0,
"step": 9350
},
{
"epoch": 9.021118310535158,
"grad_norm": 5.253164106030566e-06,
"learning_rate": 1.9404418828049953e-06,
"loss": 0.0,
"step": 9400
},
{
"epoch": 9.069114470842333,
"grad_norm": 5.336268305847696e-06,
"learning_rate": 1.8443804034582133e-06,
"loss": 0.0,
"step": 9450
},
{
"epoch": 9.117110631149508,
"grad_norm": 5.499407451309478e-06,
"learning_rate": 1.7483189241114315e-06,
"loss": 0.0,
"step": 9500
},
{
"epoch": 9.165106791456683,
"grad_norm": 5.408390950384542e-06,
"learning_rate": 1.6522574447646495e-06,
"loss": 0.0,
"step": 9550
},
{
"epoch": 9.213102951763858,
"grad_norm": 5.553835388347193e-06,
"learning_rate": 1.5561959654178677e-06,
"loss": 0.0,
"step": 9600
},
{
"epoch": 9.261099112071035,
"grad_norm": 5.699601498527537e-06,
"learning_rate": 1.4601344860710855e-06,
"loss": 0.0,
"step": 9650
},
{
"epoch": 9.30909527237821,
"grad_norm": 5.255620911022998e-06,
"learning_rate": 1.3640730067243035e-06,
"loss": 0.0,
"step": 9700
},
{
"epoch": 9.357091432685385,
"grad_norm": 5.273446639593305e-06,
"learning_rate": 1.2680115273775217e-06,
"loss": 0.0,
"step": 9750
},
{
"epoch": 9.40508759299256,
"grad_norm": 5.48100505781922e-06,
"learning_rate": 1.17195004803074e-06,
"loss": 0.0,
"step": 9800
},
{
"epoch": 9.453083753299737,
"grad_norm": 5.553930044483729e-06,
"learning_rate": 1.0758885686839577e-06,
"loss": 0.0,
"step": 9850
},
{
"epoch": 9.501079913606912,
"grad_norm": 5.3176061212886114e-06,
"learning_rate": 9.79827089337176e-07,
"loss": 0.0,
"step": 9900
},
{
"epoch": 9.549076073914087,
"grad_norm": 5.081974239558427e-06,
"learning_rate": 8.837656099903939e-07,
"loss": 0.0,
"step": 9950
},
{
"epoch": 9.597072234221262,
"grad_norm": 5.6227988912106e-06,
"learning_rate": 7.87704130643612e-07,
"loss": 0.0,
"step": 10000
},
{
"epoch": 9.645068394528437,
"grad_norm": 5.376910128361713e-06,
"learning_rate": 6.916426512968301e-07,
"loss": 0.0,
"step": 10050
},
{
"epoch": 9.693064554835614,
"grad_norm": 5.373704154764948e-06,
"learning_rate": 5.955811719500481e-07,
"loss": 0.0,
"step": 10100
},
{
"epoch": 9.741060715142789,
"grad_norm": 5.238107407909212e-06,
"learning_rate": 4.995196926032661e-07,
"loss": 0.0,
"step": 10150
},
{
"epoch": 9.789056875449964,
"grad_norm": 5.298873987229588e-06,
"learning_rate": 4.034582132564842e-07,
"loss": 0.0,
"step": 10200
},
{
"epoch": 9.837053035757139,
"grad_norm": 5.358286256003133e-06,
"learning_rate": 3.0739673390970224e-07,
"loss": 0.0,
"step": 10250
},
{
"epoch": 9.885049196064315,
"grad_norm": 5.384552132125465e-06,
"learning_rate": 2.1133525456292026e-07,
"loss": 0.0,
"step": 10300
},
{
"epoch": 9.93304535637149,
"grad_norm": 5.152297170063697e-06,
"learning_rate": 1.1527377521613833e-07,
"loss": 0.0,
"step": 10350
},
{
"epoch": 9.981041516678665,
"grad_norm": 5.208506862308438e-06,
"learning_rate": 1.921229586935639e-08,
"loss": 0.0,
"step": 10400
}
],
"logging_steps": 50,
"max_steps": 10410,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4884160649887744.0,
"train_batch_size": 6,
"trial_name": null,
"trial_params": null
}