{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 114591,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.013090033248684452,
"grad_norm": 0.4367656707763672,
"learning_rate": 1.991273311167544e-05,
"loss": 1.32,
"step": 500
},
{
"epoch": 0.026180066497368905,
"grad_norm": 0.39482298493385315,
"learning_rate": 1.9825466223350874e-05,
"loss": 0.0594,
"step": 1000
},
{
"epoch": 0.039270099746053354,
"grad_norm": 0.47824403643608093,
"learning_rate": 1.9738199335026312e-05,
"loss": 0.0497,
"step": 1500
},
{
"epoch": 0.05236013299473781,
"grad_norm": 0.3971627652645111,
"learning_rate": 1.965093244670175e-05,
"loss": 0.043,
"step": 2000
},
{
"epoch": 0.06545016624342226,
"grad_norm": 0.5000212788581848,
"learning_rate": 1.9563665558377188e-05,
"loss": 0.0374,
"step": 2500
},
{
"epoch": 0.07854019949210671,
"grad_norm": 0.9078471064567566,
"learning_rate": 1.9476398670052623e-05,
"loss": 0.0357,
"step": 3000
},
{
"epoch": 0.09163023274079116,
"grad_norm": 0.35205739736557007,
"learning_rate": 1.938913178172806e-05,
"loss": 0.0317,
"step": 3500
},
{
"epoch": 0.10472026598947562,
"grad_norm": 0.19642357528209686,
"learning_rate": 1.9301864893403495e-05,
"loss": 0.0304,
"step": 4000
},
{
"epoch": 0.11781029923816007,
"grad_norm": 0.5134682655334473,
"learning_rate": 1.9214598005078933e-05,
"loss": 0.0281,
"step": 4500
},
{
"epoch": 0.13090033248684452,
"grad_norm": 0.23212824761867523,
"learning_rate": 1.912733111675437e-05,
"loss": 0.0274,
"step": 5000
},
{
"epoch": 0.14399036573552898,
"grad_norm": 0.4254980981349945,
"learning_rate": 1.904006422842981e-05,
"loss": 0.0247,
"step": 5500
},
{
"epoch": 0.15708039898421342,
"grad_norm": 0.4024842083454132,
"learning_rate": 1.8952797340105244e-05,
"loss": 0.0235,
"step": 6000
},
{
"epoch": 0.17017043223289788,
"grad_norm": 0.40947026014328003,
"learning_rate": 1.8865530451780682e-05,
"loss": 0.0235,
"step": 6500
},
{
"epoch": 0.1832604654815823,
"grad_norm": 0.366158127784729,
"learning_rate": 1.877826356345612e-05,
"loss": 0.0225,
"step": 7000
},
{
"epoch": 0.19635049873026678,
"grad_norm": 0.4678824841976166,
"learning_rate": 1.8690996675131558e-05,
"loss": 0.0219,
"step": 7500
},
{
"epoch": 0.20944053197895124,
"grad_norm": 0.21004918217658997,
"learning_rate": 1.8603729786806996e-05,
"loss": 0.0198,
"step": 8000
},
{
"epoch": 0.22253056522763567,
"grad_norm": 0.2192375510931015,
"learning_rate": 1.851646289848243e-05,
"loss": 0.0201,
"step": 8500
},
{
"epoch": 0.23562059847632014,
"grad_norm": 0.15161575376987457,
"learning_rate": 1.842919601015787e-05,
"loss": 0.0192,
"step": 9000
},
{
"epoch": 0.24871063172500457,
"grad_norm": 0.3270639181137085,
"learning_rate": 1.8341929121833303e-05,
"loss": 0.0186,
"step": 9500
},
{
"epoch": 0.26180066497368903,
"grad_norm": 0.25447696447372437,
"learning_rate": 1.825466223350874e-05,
"loss": 0.0182,
"step": 10000
},
{
"epoch": 0.2748906982223735,
"grad_norm": 0.1797029674053192,
"learning_rate": 1.816739534518418e-05,
"loss": 0.0182,
"step": 10500
},
{
"epoch": 0.28798073147105796,
"grad_norm": 0.4422529637813568,
"learning_rate": 1.8080128456859617e-05,
"loss": 0.0175,
"step": 11000
},
{
"epoch": 0.30107076471974237,
"grad_norm": 0.19019261002540588,
"learning_rate": 1.7992861568535052e-05,
"loss": 0.0159,
"step": 11500
},
{
"epoch": 0.31416079796842683,
"grad_norm": 0.12262561917304993,
"learning_rate": 1.790559468021049e-05,
"loss": 0.0162,
"step": 12000
},
{
"epoch": 0.3272508312171113,
"grad_norm": 0.5514086484909058,
"learning_rate": 1.7818327791885924e-05,
"loss": 0.0159,
"step": 12500
},
{
"epoch": 0.34034086446579576,
"grad_norm": 0.3397659957408905,
"learning_rate": 1.7731060903561362e-05,
"loss": 0.0163,
"step": 13000
},
{
"epoch": 0.3534308977144802,
"grad_norm": 0.3542526364326477,
"learning_rate": 1.76437940152368e-05,
"loss": 0.0161,
"step": 13500
},
{
"epoch": 0.3665209309631646,
"grad_norm": 0.22688041627407074,
"learning_rate": 1.7556527126912238e-05,
"loss": 0.0155,
"step": 14000
},
{
"epoch": 0.3796109642118491,
"grad_norm": 0.20115447044372559,
"learning_rate": 1.7469260238587673e-05,
"loss": 0.0148,
"step": 14500
},
{
"epoch": 0.39270099746053355,
"grad_norm": 0.09773228317499161,
"learning_rate": 1.738199335026311e-05,
"loss": 0.0149,
"step": 15000
},
{
"epoch": 0.405791030709218,
"grad_norm": 0.41297146677970886,
"learning_rate": 1.729472646193855e-05,
"loss": 0.0146,
"step": 15500
},
{
"epoch": 0.4188810639579025,
"grad_norm": 0.6495208740234375,
"learning_rate": 1.7207459573613983e-05,
"loss": 0.0136,
"step": 16000
},
{
"epoch": 0.4319710972065869,
"grad_norm": 0.35930490493774414,
"learning_rate": 1.712019268528942e-05,
"loss": 0.0137,
"step": 16500
},
{
"epoch": 0.44506113045527135,
"grad_norm": 0.22953346371650696,
"learning_rate": 1.703292579696486e-05,
"loss": 0.0139,
"step": 17000
},
{
"epoch": 0.4581511637039558,
"grad_norm": 0.41518378257751465,
"learning_rate": 1.6945658908640297e-05,
"loss": 0.0135,
"step": 17500
},
{
"epoch": 0.4712411969526403,
"grad_norm": 0.2171572744846344,
"learning_rate": 1.6858392020315732e-05,
"loss": 0.0129,
"step": 18000
},
{
"epoch": 0.48433123020132474,
"grad_norm": 0.2897777557373047,
"learning_rate": 1.677112513199117e-05,
"loss": 0.0122,
"step": 18500
},
{
"epoch": 0.49742126345000914,
"grad_norm": 0.4209305942058563,
"learning_rate": 1.6683858243666608e-05,
"loss": 0.0128,
"step": 19000
},
{
"epoch": 0.5105112966986937,
"grad_norm": 0.08220311999320984,
"learning_rate": 1.6596591355342046e-05,
"loss": 0.0123,
"step": 19500
},
{
"epoch": 0.5236013299473781,
"grad_norm": 0.09847331047058105,
"learning_rate": 1.650932446701748e-05,
"loss": 0.0123,
"step": 20000
},
{
"epoch": 0.5366913631960625,
"grad_norm": 0.41798558831214905,
"learning_rate": 1.642205757869292e-05,
"loss": 0.0128,
"step": 20500
},
{
"epoch": 0.549781396444747,
"grad_norm": 0.2177186757326126,
"learning_rate": 1.6334790690368353e-05,
"loss": 0.0124,
"step": 21000
},
{
"epoch": 0.5628714296934314,
"grad_norm": 0.16467055678367615,
"learning_rate": 1.624752380204379e-05,
"loss": 0.0118,
"step": 21500
},
{
"epoch": 0.5759614629421159,
"grad_norm": 0.3392176330089569,
"learning_rate": 1.616025691371923e-05,
"loss": 0.0121,
"step": 22000
},
{
"epoch": 0.5890514961908003,
"grad_norm": 0.14208835363388062,
"learning_rate": 1.6072990025394667e-05,
"loss": 0.0118,
"step": 22500
},
{
"epoch": 0.6021415294394847,
"grad_norm": 0.18248602747917175,
"learning_rate": 1.5985723137070105e-05,
"loss": 0.0118,
"step": 23000
},
{
"epoch": 0.6152315626881693,
"grad_norm": 0.32215237617492676,
"learning_rate": 1.589845624874554e-05,
"loss": 0.0111,
"step": 23500
},
{
"epoch": 0.6283215959368537,
"grad_norm": 0.35770946741104126,
"learning_rate": 1.5811189360420978e-05,
"loss": 0.0111,
"step": 24000
},
{
"epoch": 0.6414116291855382,
"grad_norm": 0.21091119945049286,
"learning_rate": 1.5723922472096412e-05,
"loss": 0.0115,
"step": 24500
},
{
"epoch": 0.6545016624342226,
"grad_norm": 0.215097576379776,
"learning_rate": 1.563665558377185e-05,
"loss": 0.0107,
"step": 25000
},
{
"epoch": 0.667591695682907,
"grad_norm": 0.15477751195430756,
"learning_rate": 1.554938869544729e-05,
"loss": 0.0105,
"step": 25500
},
{
"epoch": 0.6806817289315915,
"grad_norm": 0.1576426774263382,
"learning_rate": 1.5462121807122726e-05,
"loss": 0.0105,
"step": 26000
},
{
"epoch": 0.6937717621802759,
"grad_norm": 0.19675689935684204,
"learning_rate": 1.537485491879816e-05,
"loss": 0.0096,
"step": 26500
},
{
"epoch": 0.7068617954289604,
"grad_norm": 0.480955570936203,
"learning_rate": 1.52875880304736e-05,
"loss": 0.0104,
"step": 27000
},
{
"epoch": 0.7199518286776448,
"grad_norm": 0.2651515603065491,
"learning_rate": 1.5200321142149035e-05,
"loss": 0.0106,
"step": 27500
},
{
"epoch": 0.7330418619263293,
"grad_norm": 0.21407313644886017,
"learning_rate": 1.5113054253824473e-05,
"loss": 0.0102,
"step": 28000
},
{
"epoch": 0.7461318951750138,
"grad_norm": 0.13915963470935822,
"learning_rate": 1.502578736549991e-05,
"loss": 0.0098,
"step": 28500
},
{
"epoch": 0.7592219284236982,
"grad_norm": 0.09625498950481415,
"learning_rate": 1.4938520477175348e-05,
"loss": 0.0099,
"step": 29000
},
{
"epoch": 0.7723119616723827,
"grad_norm": 0.26766207814216614,
"learning_rate": 1.4851253588850782e-05,
"loss": 0.01,
"step": 29500
},
{
"epoch": 0.7854019949210671,
"grad_norm": 0.03729819133877754,
"learning_rate": 1.476398670052622e-05,
"loss": 0.0092,
"step": 30000
},
{
"epoch": 0.7984920281697515,
"grad_norm": 0.19327211380004883,
"learning_rate": 1.4676719812201658e-05,
"loss": 0.0094,
"step": 30500
},
{
"epoch": 0.811582061418436,
"grad_norm": 0.27896180748939514,
"learning_rate": 1.4589452923877094e-05,
"loss": 0.01,
"step": 31000
},
{
"epoch": 0.8246720946671204,
"grad_norm": 0.1490613967180252,
"learning_rate": 1.4502186035552532e-05,
"loss": 0.0092,
"step": 31500
},
{
"epoch": 0.837762127915805,
"grad_norm": 0.22722095251083374,
"learning_rate": 1.4414919147227969e-05,
"loss": 0.0093,
"step": 32000
},
{
"epoch": 0.8508521611644894,
"grad_norm": 0.2942313849925995,
"learning_rate": 1.4327652258903407e-05,
"loss": 0.0098,
"step": 32500
},
{
"epoch": 0.8639421944131738,
"grad_norm": 0.37654176354408264,
"learning_rate": 1.4240385370578841e-05,
"loss": 0.0095,
"step": 33000
},
{
"epoch": 0.8770322276618583,
"grad_norm": 0.21543939411640167,
"learning_rate": 1.415311848225428e-05,
"loss": 0.0097,
"step": 33500
},
{
"epoch": 0.8901222609105427,
"grad_norm": 0.4608267843723297,
"learning_rate": 1.4065851593929716e-05,
"loss": 0.0093,
"step": 34000
},
{
"epoch": 0.9032122941592272,
"grad_norm": 0.1784515380859375,
"learning_rate": 1.3978584705605154e-05,
"loss": 0.009,
"step": 34500
},
{
"epoch": 0.9163023274079116,
"grad_norm": 0.3353884518146515,
"learning_rate": 1.389131781728059e-05,
"loss": 0.009,
"step": 35000
},
{
"epoch": 0.929392360656596,
"grad_norm": 0.2330337017774582,
"learning_rate": 1.3804050928956028e-05,
"loss": 0.009,
"step": 35500
},
{
"epoch": 0.9424823939052805,
"grad_norm": 0.32975077629089355,
"learning_rate": 1.3716784040631464e-05,
"loss": 0.0092,
"step": 36000
},
{
"epoch": 0.955572427153965,
"grad_norm": 0.03280099481344223,
"learning_rate": 1.3629517152306902e-05,
"loss": 0.0084,
"step": 36500
},
{
"epoch": 0.9686624604026495,
"grad_norm": 0.22344884276390076,
"learning_rate": 1.3542250263982337e-05,
"loss": 0.0089,
"step": 37000
},
{
"epoch": 0.9817524936513339,
"grad_norm": 0.361147403717041,
"learning_rate": 1.3454983375657775e-05,
"loss": 0.0084,
"step": 37500
},
{
"epoch": 0.9948425269000183,
"grad_norm": 0.2099611908197403,
"learning_rate": 1.3367716487333213e-05,
"loss": 0.0088,
"step": 38000
},
{
"epoch": 1.0,
"eval_loss": 0.005731215700507164,
"eval_runtime": 2297.4971,
"eval_samples_per_second": 133.003,
"eval_steps_per_second": 16.625,
"step": 38197
},
{
"epoch": 1.0079325601487028,
"grad_norm": 0.24136124551296234,
"learning_rate": 1.3280449599008649e-05,
"loss": 0.0078,
"step": 38500
},
{
"epoch": 1.0210225933973873,
"grad_norm": 0.42886942625045776,
"learning_rate": 1.3193182710684087e-05,
"loss": 0.0082,
"step": 39000
},
{
"epoch": 1.0341126266460716,
"grad_norm": 0.10831937938928604,
"learning_rate": 1.3105915822359523e-05,
"loss": 0.0071,
"step": 39500
},
{
"epoch": 1.0472026598947561,
"grad_norm": 0.20537184178829193,
"learning_rate": 1.3018648934034961e-05,
"loss": 0.0081,
"step": 40000
},
{
"epoch": 1.0602926931434407,
"grad_norm": 0.28797346353530884,
"learning_rate": 1.2931382045710396e-05,
"loss": 0.0075,
"step": 40500
},
{
"epoch": 1.073382726392125,
"grad_norm": 0.22928300499916077,
"learning_rate": 1.2844115157385834e-05,
"loss": 0.0076,
"step": 41000
},
{
"epoch": 1.0864727596408095,
"grad_norm": 0.003099123015999794,
"learning_rate": 1.275684826906127e-05,
"loss": 0.0079,
"step": 41500
},
{
"epoch": 1.099562792889494,
"grad_norm": 0.287396639585495,
"learning_rate": 1.2669581380736708e-05,
"loss": 0.0075,
"step": 42000
},
{
"epoch": 1.1126528261381785,
"grad_norm": 0.07049620896577835,
"learning_rate": 1.2582314492412144e-05,
"loss": 0.007,
"step": 42500
},
{
"epoch": 1.1257428593868628,
"grad_norm": 0.2029752880334854,
"learning_rate": 1.2495047604087582e-05,
"loss": 0.0076,
"step": 43000
},
{
"epoch": 1.1388328926355473,
"grad_norm": 0.04248388856649399,
"learning_rate": 1.2407780715763019e-05,
"loss": 0.0072,
"step": 43500
},
{
"epoch": 1.1519229258842318,
"grad_norm": 0.4174834191799164,
"learning_rate": 1.2320513827438457e-05,
"loss": 0.0077,
"step": 44000
},
{
"epoch": 1.1650129591329161,
"grad_norm": 0.25623586773872375,
"learning_rate": 1.2233246939113891e-05,
"loss": 0.0069,
"step": 44500
},
{
"epoch": 1.1781029923816007,
"grad_norm": 0.12231756001710892,
"learning_rate": 1.214598005078933e-05,
"loss": 0.0076,
"step": 45000
},
{
"epoch": 1.1911930256302852,
"grad_norm": 0.24928466975688934,
"learning_rate": 1.2058713162464767e-05,
"loss": 0.0072,
"step": 45500
},
{
"epoch": 1.2042830588789695,
"grad_norm": 0.11469607055187225,
"learning_rate": 1.1971446274140204e-05,
"loss": 0.0068,
"step": 46000
},
{
"epoch": 1.217373092127654,
"grad_norm": 0.389217734336853,
"learning_rate": 1.1884179385815642e-05,
"loss": 0.0072,
"step": 46500
},
{
"epoch": 1.2304631253763385,
"grad_norm": 0.1249752938747406,
"learning_rate": 1.1796912497491078e-05,
"loss": 0.0065,
"step": 47000
},
{
"epoch": 1.2435531586250228,
"grad_norm": 0.2189619392156601,
"learning_rate": 1.1709645609166516e-05,
"loss": 0.0073,
"step": 47500
},
{
"epoch": 1.2566431918737073,
"grad_norm": 0.3927897810935974,
"learning_rate": 1.1622378720841952e-05,
"loss": 0.0069,
"step": 48000
},
{
"epoch": 1.2697332251223918,
"grad_norm": 0.07470349222421646,
"learning_rate": 1.153511183251739e-05,
"loss": 0.0076,
"step": 48500
},
{
"epoch": 1.2828232583710761,
"grad_norm": 0.3911282420158386,
"learning_rate": 1.1447844944192825e-05,
"loss": 0.0071,
"step": 49000
},
{
"epoch": 1.2959132916197607,
"grad_norm": 0.2361019402742386,
"learning_rate": 1.1360578055868263e-05,
"loss": 0.0074,
"step": 49500
},
{
"epoch": 1.3090033248684452,
"grad_norm": 0.1682516634464264,
"learning_rate": 1.1273311167543699e-05,
"loss": 0.0076,
"step": 50000
},
{
"epoch": 1.3220933581171297,
"grad_norm": 0.1294177621603012,
"learning_rate": 1.1186044279219137e-05,
"loss": 0.0065,
"step": 50500
},
{
"epoch": 1.335183391365814,
"grad_norm": 0.5710951685905457,
"learning_rate": 1.1098777390894573e-05,
"loss": 0.0068,
"step": 51000
},
{
"epoch": 1.3482734246144985,
"grad_norm": 0.12587948143482208,
"learning_rate": 1.1011510502570011e-05,
"loss": 0.0071,
"step": 51500
},
{
"epoch": 1.361363457863183,
"grad_norm": 0.37146255373954773,
"learning_rate": 1.0924243614245446e-05,
"loss": 0.0069,
"step": 52000
},
{
"epoch": 1.3744534911118675,
"grad_norm": 0.29824337363243103,
"learning_rate": 1.0836976725920884e-05,
"loss": 0.0069,
"step": 52500
},
{
"epoch": 1.3875435243605518,
"grad_norm": 0.10349422693252563,
"learning_rate": 1.0749709837596322e-05,
"loss": 0.007,
"step": 53000
},
{
"epoch": 1.4006335576092364,
"grad_norm": 0.2823665142059326,
"learning_rate": 1.0662442949271758e-05,
"loss": 0.0069,
"step": 53500
},
{
"epoch": 1.4137235908579209,
"grad_norm": 0.038343362510204315,
"learning_rate": 1.0575176060947196e-05,
"loss": 0.0061,
"step": 54000
},
{
"epoch": 1.4268136241066052,
"grad_norm": 0.2581956088542938,
"learning_rate": 1.0487909172622633e-05,
"loss": 0.0062,
"step": 54500
},
{
"epoch": 1.4399036573552897,
"grad_norm": 0.11447520554065704,
"learning_rate": 1.040064228429807e-05,
"loss": 0.0066,
"step": 55000
},
{
"epoch": 1.4529936906039742,
"grad_norm": 0.4050372838973999,
"learning_rate": 1.0313375395973507e-05,
"loss": 0.007,
"step": 55500
},
{
"epoch": 1.4660837238526585,
"grad_norm": 0.022968396544456482,
"learning_rate": 1.0226108507648945e-05,
"loss": 0.0066,
"step": 56000
},
{
"epoch": 1.479173757101343,
"grad_norm": 0.07500626146793365,
"learning_rate": 1.013884161932438e-05,
"loss": 0.0065,
"step": 56500
},
{
"epoch": 1.4922637903500275,
"grad_norm": 0.031823791563510895,
"learning_rate": 1.0051574730999817e-05,
"loss": 0.0063,
"step": 57000
},
{
"epoch": 1.5053538235987118,
"grad_norm": 0.19768255949020386,
"learning_rate": 9.964307842675255e-06,
"loss": 0.0068,
"step": 57500
},
{
"epoch": 1.5184438568473964,
"grad_norm": 0.210379958152771,
"learning_rate": 9.877040954350692e-06,
"loss": 0.0064,
"step": 58000
},
{
"epoch": 1.5315338900960809,
"grad_norm": 0.14373145997524261,
"learning_rate": 9.789774066026128e-06,
"loss": 0.007,
"step": 58500
},
{
"epoch": 1.5446239233447652,
"grad_norm": 0.2256031185388565,
"learning_rate": 9.702507177701566e-06,
"loss": 0.0063,
"step": 59000
},
{
"epoch": 1.55771395659345,
"grad_norm": 0.11486474424600601,
"learning_rate": 9.615240289377002e-06,
"loss": 0.0062,
"step": 59500
},
{
"epoch": 1.5708039898421342,
"grad_norm": 0.20883575081825256,
"learning_rate": 9.527973401052439e-06,
"loss": 0.0061,
"step": 60000
},
{
"epoch": 1.5838940230908185,
"grad_norm": 0.15535525977611542,
"learning_rate": 9.440706512727877e-06,
"loss": 0.006,
"step": 60500
},
{
"epoch": 1.5969840563395032,
"grad_norm": 0.2790842652320862,
"learning_rate": 9.353439624403313e-06,
"loss": 0.0065,
"step": 61000
},
{
"epoch": 1.6100740895881875,
"grad_norm": 0.338480681180954,
"learning_rate": 9.266172736078751e-06,
"loss": 0.0066,
"step": 61500
},
{
"epoch": 1.623164122836872,
"grad_norm": 0.3213784098625183,
"learning_rate": 9.178905847754187e-06,
"loss": 0.0064,
"step": 62000
},
{
"epoch": 1.6362541560855566,
"grad_norm": 0.22697031497955322,
"learning_rate": 9.091638959429623e-06,
"loss": 0.0066,
"step": 62500
},
{
"epoch": 1.6493441893342409,
"grad_norm": 0.12834736704826355,
"learning_rate": 9.004372071105061e-06,
"loss": 0.0063,
"step": 63000
},
{
"epoch": 1.6624342225829254,
"grad_norm": 0.08631685376167297,
"learning_rate": 8.9171051827805e-06,
"loss": 0.0058,
"step": 63500
},
{
"epoch": 1.67552425583161,
"grad_norm": 0.12295836955308914,
"learning_rate": 8.829838294455936e-06,
"loss": 0.0054,
"step": 64000
},
{
"epoch": 1.6886142890802942,
"grad_norm": 0.3893487751483917,
"learning_rate": 8.742571406131372e-06,
"loss": 0.0062,
"step": 64500
},
{
"epoch": 1.7017043223289787,
"grad_norm": 0.14583726227283478,
"learning_rate": 8.65530451780681e-06,
"loss": 0.0063,
"step": 65000
},
{
"epoch": 1.7147943555776632,
"grad_norm": 0.3584669232368469,
"learning_rate": 8.568037629482246e-06,
"loss": 0.0064,
"step": 65500
},
{
"epoch": 1.7278843888263475,
"grad_norm": 0.05680006742477417,
"learning_rate": 8.480770741157683e-06,
"loss": 0.0061,
"step": 66000
},
{
"epoch": 1.740974422075032,
"grad_norm": 0.15972712635993958,
"learning_rate": 8.39350385283312e-06,
"loss": 0.0061,
"step": 66500
},
{
"epoch": 1.7540644553237166,
"grad_norm": 0.3504839837551117,
"learning_rate": 8.306236964508557e-06,
"loss": 0.0056,
"step": 67000
},
{
"epoch": 1.7671544885724009,
"grad_norm": 0.2058769315481186,
"learning_rate": 8.218970076183995e-06,
"loss": 0.0062,
"step": 67500
},
{
"epoch": 1.7802445218210854,
"grad_norm": 0.2028743475675583,
"learning_rate": 8.131703187859431e-06,
"loss": 0.0062,
"step": 68000
},
{
"epoch": 1.79333455506977,
"grad_norm": 0.11009418964385986,
"learning_rate": 8.044436299534868e-06,
"loss": 0.0062,
"step": 68500
},
{
"epoch": 1.8064245883184542,
"grad_norm": 0.0618484802544117,
"learning_rate": 7.957169411210305e-06,
"loss": 0.0062,
"step": 69000
},
{
"epoch": 1.819514621567139,
"grad_norm": 0.08521759510040283,
"learning_rate": 7.869902522885742e-06,
"loss": 0.0059,
"step": 69500
},
{
"epoch": 1.8326046548158232,
"grad_norm": 0.3029402196407318,
"learning_rate": 7.782635634561178e-06,
"loss": 0.0059,
"step": 70000
},
{
"epoch": 1.8456946880645075,
"grad_norm": 0.23642723262310028,
"learning_rate": 7.695368746236616e-06,
"loss": 0.006,
"step": 70500
},
{
"epoch": 1.8587847213131923,
"grad_norm": 0.14888063073158264,
"learning_rate": 7.608101857912053e-06,
"loss": 0.006,
"step": 71000
},
{
"epoch": 1.8718747545618766,
"grad_norm": 0.10285039246082306,
"learning_rate": 7.52083496958749e-06,
"loss": 0.0054,
"step": 71500
},
{
"epoch": 1.884964787810561,
"grad_norm": 0.1975327879190445,
"learning_rate": 7.4335680812629275e-06,
"loss": 0.0059,
"step": 72000
},
{
"epoch": 1.8980548210592456,
"grad_norm": 0.24295471608638763,
"learning_rate": 7.346301192938365e-06,
"loss": 0.0058,
"step": 72500
},
{
"epoch": 1.91114485430793,
"grad_norm": 0.03384074568748474,
"learning_rate": 7.259034304613801e-06,
"loss": 0.0058,
"step": 73000
},
{
"epoch": 1.9242348875566144,
"grad_norm": 0.18082177639007568,
"learning_rate": 7.171767416289238e-06,
"loss": 0.0058,
"step": 73500
},
{
"epoch": 1.937324920805299,
"grad_norm": 0.33321407437324524,
"learning_rate": 7.084500527964675e-06,
"loss": 0.0057,
"step": 74000
},
{
"epoch": 1.9504149540539832,
"grad_norm": 0.0694960206747055,
"learning_rate": 6.9972336396401115e-06,
"loss": 0.0058,
"step": 74500
},
{
"epoch": 1.9635049873026678,
"grad_norm": 0.11903239041566849,
"learning_rate": 6.909966751315549e-06,
"loss": 0.0057,
"step": 75000
},
{
"epoch": 1.9765950205513523,
"grad_norm": 0.28279566764831543,
"learning_rate": 6.822699862990986e-06,
"loss": 0.0054,
"step": 75500
},
{
"epoch": 1.9896850538000366,
"grad_norm": 0.12491460144519806,
"learning_rate": 6.735432974666423e-06,
"loss": 0.0055,
"step": 76000
},
{
"epoch": 2.0,
"eval_loss": 0.0037132962606847286,
"eval_runtime": 2298.3508,
"eval_samples_per_second": 132.953,
"eval_steps_per_second": 16.619,
"step": 76394
},
{
"epoch": 2.0027750870487213,
"grad_norm": 0.17459292709827423,
"learning_rate": 6.648166086341859e-06,
"loss": 0.0053,
"step": 76500
},
{
"epoch": 2.0158651202974056,
"grad_norm": 0.12316348403692245,
"learning_rate": 6.560899198017296e-06,
"loss": 0.0048,
"step": 77000
},
{
"epoch": 2.02895515354609,
"grad_norm": 0.2703343331813812,
"learning_rate": 6.4736323096927336e-06,
"loss": 0.0052,
"step": 77500
},
{
"epoch": 2.0420451867947746,
"grad_norm": 0.16996227204799652,
"learning_rate": 6.386365421368171e-06,
"loss": 0.0051,
"step": 78000
},
{
"epoch": 2.055135220043459,
"grad_norm": 0.11727280914783478,
"learning_rate": 6.299098533043609e-06,
"loss": 0.0047,
"step": 78500
},
{
"epoch": 2.0682252532921432,
"grad_norm": 0.2634254992008209,
"learning_rate": 6.211831644719045e-06,
"loss": 0.0049,
"step": 79000
},
{
"epoch": 2.081315286540828,
"grad_norm": 0.10610348731279373,
"learning_rate": 6.124564756394482e-06,
"loss": 0.0052,
"step": 79500
},
{
"epoch": 2.0944053197895123,
"grad_norm": 0.09022970497608185,
"learning_rate": 6.037297868069919e-06,
"loss": 0.005,
"step": 80000
},
{
"epoch": 2.1074953530381966,
"grad_norm": 0.061074189841747284,
"learning_rate": 5.9500309797453556e-06,
"loss": 0.0049,
"step": 80500
},
{
"epoch": 2.1205853862868813,
"grad_norm": 0.13339538872241974,
"learning_rate": 5.862764091420793e-06,
"loss": 0.0047,
"step": 81000
},
{
"epoch": 2.1336754195355656,
"grad_norm": 0.0852557048201561,
"learning_rate": 5.77549720309623e-06,
"loss": 0.0051,
"step": 81500
},
{
"epoch": 2.14676545278425,
"grad_norm": 0.10160050541162491,
"learning_rate": 5.688230314771667e-06,
"loss": 0.0048,
"step": 82000
},
{
"epoch": 2.1598554860329346,
"grad_norm": 0.04886355251073837,
"learning_rate": 5.600963426447103e-06,
"loss": 0.0048,
"step": 82500
},
{
"epoch": 2.172945519281619,
"grad_norm": 0.13570809364318848,
"learning_rate": 5.5136965381225404e-06,
"loss": 0.0048,
"step": 83000
},
{
"epoch": 2.1860355525303032,
"grad_norm": 0.4608564078807831,
"learning_rate": 5.426429649797978e-06,
"loss": 0.0047,
"step": 83500
},
{
"epoch": 2.199125585778988,
"grad_norm": 0.21086885035037994,
"learning_rate": 5.339162761473414e-06,
"loss": 0.0048,
"step": 84000
},
{
"epoch": 2.2122156190276723,
"grad_norm": 0.10506796091794968,
"learning_rate": 5.251895873148851e-06,
"loss": 0.0045,
"step": 84500
},
{
"epoch": 2.225305652276357,
"grad_norm": 0.11701245605945587,
"learning_rate": 5.164628984824288e-06,
"loss": 0.0052,
"step": 85000
},
{
"epoch": 2.2383956855250413,
"grad_norm": 0.12591439485549927,
"learning_rate": 5.077362096499725e-06,
"loss": 0.0051,
"step": 85500
},
{
"epoch": 2.2514857187737256,
"grad_norm": 0.17858508229255676,
"learning_rate": 4.9900952081751625e-06,
"loss": 0.0046,
"step": 86000
},
{
"epoch": 2.2645757520224103,
"grad_norm": 0.1313404142856598,
"learning_rate": 4.9028283198506e-06,
"loss": 0.0049,
"step": 86500
},
{
"epoch": 2.2776657852710946,
"grad_norm": 0.017278827726840973,
"learning_rate": 4.815561431526036e-06,
"loss": 0.0046,
"step": 87000
},
{
"epoch": 2.290755818519779,
"grad_norm": 0.08634085208177567,
"learning_rate": 4.728294543201473e-06,
"loss": 0.0047,
"step": 87500
},
{
"epoch": 2.3038458517684637,
"grad_norm": 0.15097448229789734,
"learning_rate": 4.641027654876911e-06,
"loss": 0.0049,
"step": 88000
},
{
"epoch": 2.316935885017148,
"grad_norm": 0.16411340236663818,
"learning_rate": 4.553760766552347e-06,
"loss": 0.0048,
"step": 88500
},
{
"epoch": 2.3300259182658323,
"grad_norm": 0.15543577075004578,
"learning_rate": 4.4664938782277845e-06,
"loss": 0.0045,
"step": 89000
},
{
"epoch": 2.343115951514517,
"grad_norm": 0.20463259518146515,
"learning_rate": 4.379226989903222e-06,
"loss": 0.0045,
"step": 89500
},
{
"epoch": 2.3562059847632013,
"grad_norm": 0.04198065027594566,
"learning_rate": 4.291960101578658e-06,
"loss": 0.0046,
"step": 90000
},
{
"epoch": 2.3692960180118856,
"grad_norm": 0.16238822042942047,
"learning_rate": 4.204693213254095e-06,
"loss": 0.0045,
"step": 90500
},
{
"epoch": 2.3823860512605703,
"grad_norm": 0.012462672777473927,
"learning_rate": 4.117426324929532e-06,
"loss": 0.0051,
"step": 91000
},
{
"epoch": 2.3954760845092546,
"grad_norm": 0.015213750302791595,
"learning_rate": 4.030159436604969e-06,
"loss": 0.0044,
"step": 91500
},
{
"epoch": 2.408566117757939,
"grad_norm": 0.040404822677373886,
"learning_rate": 3.9428925482804065e-06,
"loss": 0.0049,
"step": 92000
},
{
"epoch": 2.4216561510066237,
"grad_norm": 0.15160289406776428,
"learning_rate": 3.855625659955844e-06,
"loss": 0.0049,
"step": 92500
},
{
"epoch": 2.434746184255308,
"grad_norm": 0.08896700292825699,
"learning_rate": 3.7683587716312804e-06,
"loss": 0.0046,
"step": 93000
},
{
"epoch": 2.4478362175039923,
"grad_norm": 0.015480602160096169,
"learning_rate": 3.681091883306717e-06,
"loss": 0.0044,
"step": 93500
},
{
"epoch": 2.460926250752677,
"grad_norm": 0.011969960294663906,
"learning_rate": 3.5938249949821542e-06,
"loss": 0.0054,
"step": 94000
},
{
"epoch": 2.4740162840013613,
"grad_norm": 0.11534956097602844,
"learning_rate": 3.506558106657591e-06,
"loss": 0.0046,
"step": 94500
},
{
"epoch": 2.4871063172500456,
"grad_norm": 0.2974827289581299,
"learning_rate": 3.419291218333028e-06,
"loss": 0.0043,
"step": 95000
},
{
"epoch": 2.5001963504987303,
"grad_norm": 0.22537314891815186,
"learning_rate": 3.3320243300084652e-06,
"loss": 0.0044,
"step": 95500
},
{
"epoch": 2.5132863837474146,
"grad_norm": 0.4512380361557007,
"learning_rate": 3.2447574416839024e-06,
"loss": 0.0041,
"step": 96000
},
{
"epoch": 2.526376416996099,
"grad_norm": 0.11714764684438705,
"learning_rate": 3.157490553359339e-06,
"loss": 0.0044,
"step": 96500
},
{
"epoch": 2.5394664502447837,
"grad_norm": 0.33583053946495056,
"learning_rate": 3.0702236650347762e-06,
"loss": 0.0046,
"step": 97000
},
{
"epoch": 2.552556483493468,
"grad_norm": 0.08749152719974518,
"learning_rate": 2.982956776710213e-06,
"loss": 0.0046,
"step": 97500
},
{
"epoch": 2.5656465167421523,
"grad_norm": 0.213958740234375,
"learning_rate": 2.89568988838565e-06,
"loss": 0.0042,
"step": 98000
},
{
"epoch": 2.578736549990837,
"grad_norm": 0.019278518855571747,
"learning_rate": 2.808423000061087e-06,
"loss": 0.0048,
"step": 98500
},
{
"epoch": 2.5918265832395213,
"grad_norm": 0.2929363548755646,
"learning_rate": 2.7211561117365244e-06,
"loss": 0.0041,
"step": 99000
},
{
"epoch": 2.6049166164882056,
"grad_norm": 0.13888810575008392,
"learning_rate": 2.633889223411961e-06,
"loss": 0.0043,
"step": 99500
},
{
"epoch": 2.6180066497368903,
"grad_norm": 0.0986974686384201,
"learning_rate": 2.5466223350873983e-06,
"loss": 0.0046,
"step": 100000
},
{
"epoch": 2.6310966829855746,
"grad_norm": 0.3214912712574005,
"learning_rate": 2.459355446762835e-06,
"loss": 0.0047,
"step": 100500
},
{
"epoch": 2.6441867162342594,
"grad_norm": 0.0012846454046666622,
"learning_rate": 2.3720885584382717e-06,
"loss": 0.0045,
"step": 101000
},
{
"epoch": 2.6572767494829437,
"grad_norm": 0.3178112506866455,
"learning_rate": 2.284821670113709e-06,
"loss": 0.0045,
"step": 101500
},
{
"epoch": 2.670366782731628,
"grad_norm": 0.26486077904701233,
"learning_rate": 2.197554781789146e-06,
"loss": 0.0043,
"step": 102000
},
{
"epoch": 2.6834568159803127,
"grad_norm": 0.1955183446407318,
"learning_rate": 2.1102878934645827e-06,
"loss": 0.0043,
"step": 102500
},
{
"epoch": 2.696546849228997,
"grad_norm": 0.27978459000587463,
"learning_rate": 2.02302100514002e-06,
"loss": 0.0043,
"step": 103000
},
{
"epoch": 2.7096368824776818,
"grad_norm": 0.16327758133411407,
"learning_rate": 1.935754116815457e-06,
"loss": 0.0044,
"step": 103500
},
{
"epoch": 2.722726915726366,
"grad_norm": 0.11214172095060349,
"learning_rate": 1.848487228490894e-06,
"loss": 0.0044,
"step": 104000
},
{
"epoch": 2.7358169489750503,
"grad_norm": 0.270905077457428,
"learning_rate": 1.7612203401663309e-06,
"loss": 0.0046,
"step": 104500
},
{
"epoch": 2.748906982223735,
"grad_norm": 0.14749902486801147,
"learning_rate": 1.6739534518417678e-06,
"loss": 0.0046,
"step": 105000
},
{
"epoch": 2.7619970154724194,
"grad_norm": 0.058535072952508926,
"learning_rate": 1.586686563517205e-06,
"loss": 0.0043,
"step": 105500
},
{
"epoch": 2.7750870487211037,
"grad_norm": 0.03967679664492607,
"learning_rate": 1.4994196751926419e-06,
"loss": 0.0043,
"step": 106000
},
{
"epoch": 2.7881770819697884,
"grad_norm": 0.22382740676403046,
"learning_rate": 1.4121527868680788e-06,
"loss": 0.0044,
"step": 106500
},
{
"epoch": 2.8012671152184727,
"grad_norm": 0.19118061661720276,
"learning_rate": 1.3248858985435155e-06,
"loss": 0.004,
"step": 107000
},
{
"epoch": 2.814357148467157,
"grad_norm": 0.18843971192836761,
"learning_rate": 1.2376190102189527e-06,
"loss": 0.0043,
"step": 107500
},
{
"epoch": 2.8274471817158418,
"grad_norm": 0.22385017573833466,
"learning_rate": 1.1503521218943898e-06,
"loss": 0.0044,
"step": 108000
},
{
"epoch": 2.840537214964526,
"grad_norm": 0.0888708084821701,
"learning_rate": 1.0630852335698265e-06,
"loss": 0.0045,
"step": 108500
},
{
"epoch": 2.8536272482132103,
"grad_norm": 0.0975847914814949,
"learning_rate": 9.758183452452637e-07,
"loss": 0.0047,
"step": 109000
},
{
"epoch": 2.866717281461895,
"grad_norm": 0.09466837346553802,
"learning_rate": 8.885514569207007e-07,
"loss": 0.0046,
"step": 109500
},
{
"epoch": 2.8798073147105794,
"grad_norm": 0.18789631128311157,
"learning_rate": 8.012845685961376e-07,
"loss": 0.0044,
"step": 110000
},
{
"epoch": 2.8928973479592637,
"grad_norm": 0.12237800657749176,
"learning_rate": 7.140176802715747e-07,
"loss": 0.004,
"step": 110500
},
{
"epoch": 2.9059873812079484,
"grad_norm": 0.13793490827083588,
"learning_rate": 6.267507919470116e-07,
"loss": 0.0046,
"step": 111000
},
{
"epoch": 2.9190774144566327,
"grad_norm": 0.1080477237701416,
"learning_rate": 5.394839036224485e-07,
"loss": 0.0041,
"step": 111500
},
{
"epoch": 2.932167447705317,
"grad_norm": 0.20041316747665405,
"learning_rate": 4.5221701529788553e-07,
"loss": 0.0043,
"step": 112000
},
{
"epoch": 2.9452574809540017,
"grad_norm": 0.3370501399040222,
"learning_rate": 3.649501269733225e-07,
"loss": 0.0045,
"step": 112500
},
{
"epoch": 2.958347514202686,
"grad_norm": 0.011929438449442387,
"learning_rate": 2.776832386487595e-07,
"loss": 0.0042,
"step": 113000
},
{
"epoch": 2.9714375474513703,
"grad_norm": 0.3616108000278473,
"learning_rate": 1.904163503241965e-07,
"loss": 0.0042,
"step": 113500
},
{
"epoch": 2.984527580700055,
"grad_norm": 0.04300173744559288,
"learning_rate": 1.0314946199963349e-07,
"loss": 0.0046,
"step": 114000
},
{
"epoch": 2.9976176139487394,
"grad_norm": 0.2760555148124695,
"learning_rate": 1.588257367507047e-08,
"loss": 0.0043,
"step": 114500
},
{
"epoch": 3.0,
"eval_loss": 0.002946872031316161,
"eval_runtime": 2297.682,
"eval_samples_per_second": 132.992,
"eval_steps_per_second": 16.624,
"step": 114591
}
],
"logging_steps": 500,
"max_steps": 114591,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6.986962453266432e+16,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}