{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.7987684187376294,
"eval_steps": 500,
"global_step": 50904,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.027490653177919506,
"grad_norm": 7.844790935516357,
"learning_rate": 1.98e-05,
"loss": 3.3532,
"step": 500
},
{
"epoch": 0.05498130635583901,
"grad_norm": 7.924389362335205,
"learning_rate": 1.9816883693400415e-05,
"loss": 2.6056,
"step": 1000
},
{
"epoch": 0.08247195953375852,
"grad_norm": 7.497577667236328,
"learning_rate": 1.963191772713821e-05,
"loss": 2.5447,
"step": 1500
},
{
"epoch": 0.10996261271167802,
"grad_norm": 6.73529577255249,
"learning_rate": 1.9446951760876e-05,
"loss": 2.4492,
"step": 2000
},
{
"epoch": 0.13745326588959753,
"grad_norm": 6.306326866149902,
"learning_rate": 1.9261985794613793e-05,
"loss": 2.4463,
"step": 2500
},
{
"epoch": 0.16494391906751704,
"grad_norm": 5.651560306549072,
"learning_rate": 1.9077019828351584e-05,
"loss": 2.3291,
"step": 3000
},
{
"epoch": 0.19243457224543656,
"grad_norm": 4.0863471031188965,
"learning_rate": 1.8892053862089378e-05,
"loss": 2.3461,
"step": 3500
},
{
"epoch": 0.21992522542335605,
"grad_norm": 4.746509075164795,
"learning_rate": 1.8707087895827168e-05,
"loss": 2.2806,
"step": 4000
},
{
"epoch": 0.24741587860127556,
"grad_norm": 4.339619159698486,
"learning_rate": 1.8522121929564962e-05,
"loss": 2.2746,
"step": 4500
},
{
"epoch": 0.27490653177919505,
"grad_norm": 4.018620014190674,
"learning_rate": 1.8337155963302752e-05,
"loss": 2.2403,
"step": 5000
},
{
"epoch": 0.3023971849571146,
"grad_norm": 4.380566120147705,
"learning_rate": 1.8152189997040546e-05,
"loss": 2.226,
"step": 5500
},
{
"epoch": 0.3298878381350341,
"grad_norm": 4.302157402038574,
"learning_rate": 1.796722403077834e-05,
"loss": 2.2119,
"step": 6000
},
{
"epoch": 0.3573784913129536,
"grad_norm": 4.434130668640137,
"learning_rate": 1.778225806451613e-05,
"loss": 2.2138,
"step": 6500
},
{
"epoch": 0.3848691444908731,
"grad_norm": 3.770848035812378,
"learning_rate": 1.759729209825392e-05,
"loss": 2.2103,
"step": 7000
},
{
"epoch": 0.4123597976687926,
"grad_norm": 2.9288535118103027,
"learning_rate": 1.7412326131991715e-05,
"loss": 2.1397,
"step": 7500
},
{
"epoch": 0.4398504508467121,
"grad_norm": 3.2082479000091553,
"learning_rate": 1.722736016572951e-05,
"loss": 2.2021,
"step": 8000
},
{
"epoch": 0.46734110402463164,
"grad_norm": 3.6131439208984375,
"learning_rate": 1.70423941994673e-05,
"loss": 2.2106,
"step": 8500
},
{
"epoch": 0.4948317572025511,
"grad_norm": 2.992415189743042,
"learning_rate": 1.6857798165137616e-05,
"loss": 2.1246,
"step": 9000
},
{
"epoch": 0.5223224103804707,
"grad_norm": 3.4553306102752686,
"learning_rate": 1.667283219887541e-05,
"loss": 2.0973,
"step": 9500
},
{
"epoch": 0.5498130635583901,
"grad_norm": 2.82578182220459,
"learning_rate": 1.64878662326132e-05,
"loss": 2.1257,
"step": 10000
},
{
"epoch": 0.5773037167363096,
"grad_norm": 2.674267292022705,
"learning_rate": 1.630290026635099e-05,
"loss": 2.13,
"step": 10500
},
{
"epoch": 0.6047943699142292,
"grad_norm": 3.27205491065979,
"learning_rate": 1.6117934300088784e-05,
"loss": 2.1855,
"step": 11000
},
{
"epoch": 0.6322850230921486,
"grad_norm": 3.042849540710449,
"learning_rate": 1.5932968333826578e-05,
"loss": 2.1532,
"step": 11500
},
{
"epoch": 0.6597756762700682,
"grad_norm": 3.618957042694092,
"learning_rate": 1.5748372299496894e-05,
"loss": 2.1202,
"step": 12000
},
{
"epoch": 0.6872663294479877,
"grad_norm": 3.6390328407287598,
"learning_rate": 1.5563406333234685e-05,
"loss": 2.0902,
"step": 12500
},
{
"epoch": 0.7147569826259071,
"grad_norm": 3.3778562545776367,
"learning_rate": 1.537844036697248e-05,
"loss": 2.0536,
"step": 13000
},
{
"epoch": 0.7422476358038267,
"grad_norm": 2.8149023056030273,
"learning_rate": 1.519347440071027e-05,
"loss": 2.1065,
"step": 13500
},
{
"epoch": 0.7697382889817462,
"grad_norm": 2.860196590423584,
"learning_rate": 1.5008508434448063e-05,
"loss": 2.0705,
"step": 14000
},
{
"epoch": 0.7972289421596657,
"grad_norm": 3.2587316036224365,
"learning_rate": 1.4823542468185853e-05,
"loss": 2.1082,
"step": 14500
},
{
"epoch": 0.8247195953375852,
"grad_norm": 2.4807844161987305,
"learning_rate": 1.4638576501923647e-05,
"loss": 2.0576,
"step": 15000
},
{
"epoch": 0.8522102485155048,
"grad_norm": 3.040374755859375,
"learning_rate": 1.445361053566144e-05,
"loss": 2.0694,
"step": 15500
},
{
"epoch": 0.8797009016934242,
"grad_norm": 3.2850606441497803,
"learning_rate": 1.4269014501331757e-05,
"loss": 2.081,
"step": 16000
},
{
"epoch": 0.9071915548713437,
"grad_norm": 3.131565570831299,
"learning_rate": 1.4084048535069548e-05,
"loss": 2.0567,
"step": 16500
},
{
"epoch": 0.9346822080492633,
"grad_norm": 2.221071481704712,
"learning_rate": 1.389908256880734e-05,
"loss": 2.0758,
"step": 17000
},
{
"epoch": 0.9621728612271827,
"grad_norm": 2.6275980472564697,
"learning_rate": 1.3714116602545134e-05,
"loss": 2.0495,
"step": 17500
},
{
"epoch": 0.9896635144051023,
"grad_norm": 3.2267441749572754,
"learning_rate": 1.3529150636282926e-05,
"loss": 2.0287,
"step": 18000
},
{
"epoch": 1.0171541675830218,
"grad_norm": 3.0593459606170654,
"learning_rate": 1.334455460195324e-05,
"loss": 2.0028,
"step": 18500
},
{
"epoch": 1.0446448207609413,
"grad_norm": 2.9170665740966797,
"learning_rate": 1.3159588635691035e-05,
"loss": 1.9644,
"step": 19000
},
{
"epoch": 1.0721354739388609,
"grad_norm": 3.075544834136963,
"learning_rate": 1.2974622669428827e-05,
"loss": 1.9723,
"step": 19500
},
{
"epoch": 1.0996261271167802,
"grad_norm": 2.9603986740112305,
"learning_rate": 1.2789656703166617e-05,
"loss": 1.997,
"step": 20000
},
{
"epoch": 1.1271167802946997,
"grad_norm": 2.951988458633423,
"learning_rate": 1.2605060668836935e-05,
"loss": 1.9418,
"step": 20500
},
{
"epoch": 1.1546074334726193,
"grad_norm": 3.427750587463379,
"learning_rate": 1.2420094702574727e-05,
"loss": 1.9553,
"step": 21000
},
{
"epoch": 1.1820980866505388,
"grad_norm": 2.6535024642944336,
"learning_rate": 1.223512873631252e-05,
"loss": 2.0072,
"step": 21500
},
{
"epoch": 1.2095887398284584,
"grad_norm": 2.9090585708618164,
"learning_rate": 1.2050162770050312e-05,
"loss": 1.9705,
"step": 22000
},
{
"epoch": 1.237079393006378,
"grad_norm": 3.2485570907592773,
"learning_rate": 1.1865196803788104e-05,
"loss": 1.9543,
"step": 22500
},
{
"epoch": 1.2645700461842972,
"grad_norm": 3.0418505668640137,
"learning_rate": 1.168060076945842e-05,
"loss": 2.0092,
"step": 23000
},
{
"epoch": 1.2920606993622168,
"grad_norm": 3.1117002964019775,
"learning_rate": 1.1495634803196214e-05,
"loss": 1.9702,
"step": 23500
},
{
"epoch": 1.3195513525401363,
"grad_norm": 2.928476095199585,
"learning_rate": 1.1310668836934005e-05,
"loss": 1.9813,
"step": 24000
},
{
"epoch": 1.3470420057180559,
"grad_norm": 3.026412010192871,
"learning_rate": 1.1125702870671797e-05,
"loss": 1.9192,
"step": 24500
},
{
"epoch": 1.3745326588959754,
"grad_norm": 3.093477249145508,
"learning_rate": 1.094073690440959e-05,
"loss": 1.9387,
"step": 25000
},
{
"epoch": 1.402023312073895,
"grad_norm": 2.920074701309204,
"learning_rate": 1.0756140870079905e-05,
"loss": 1.9651,
"step": 25500
},
{
"epoch": 1.4295139652518145,
"grad_norm": 2.5337624549865723,
"learning_rate": 1.0571174903817697e-05,
"loss": 1.9262,
"step": 26000
},
{
"epoch": 1.4570046184297338,
"grad_norm": 2.506801128387451,
"learning_rate": 1.0386208937555491e-05,
"loss": 1.9542,
"step": 26500
},
{
"epoch": 1.4844952716076534,
"grad_norm": 2.7995145320892334,
"learning_rate": 1.0201242971293283e-05,
"loss": 1.9609,
"step": 27000
},
{
"epoch": 1.511985924785573,
"grad_norm": 4.121292591094971,
"learning_rate": 1.0016646936963598e-05,
"loss": 1.9317,
"step": 27500
},
{
"epoch": 1.5394765779634925,
"grad_norm": 2.802678346633911,
"learning_rate": 9.831680970701392e-06,
"loss": 1.9519,
"step": 28000
},
{
"epoch": 1.5669672311414118,
"grad_norm": 2.965937852859497,
"learning_rate": 9.646715004439184e-06,
"loss": 1.9401,
"step": 28500
},
{
"epoch": 1.5944578843193313,
"grad_norm": 2.364539861679077,
"learning_rate": 9.461749038176976e-06,
"loss": 1.9686,
"step": 29000
},
{
"epoch": 1.6219485374972509,
"grad_norm": 2.8087072372436523,
"learning_rate": 9.276783071914768e-06,
"loss": 1.9046,
"step": 29500
},
{
"epoch": 1.6494391906751704,
"grad_norm": 3.0667874813079834,
"learning_rate": 9.092187037585085e-06,
"loss": 1.8992,
"step": 30000
},
{
"epoch": 1.67692984385309,
"grad_norm": 2.4543378353118896,
"learning_rate": 8.907221071322877e-06,
"loss": 1.8936,
"step": 30500
},
{
"epoch": 1.7044204970310095,
"grad_norm": 2.695256233215332,
"learning_rate": 8.72225510506067e-06,
"loss": 1.9429,
"step": 31000
},
{
"epoch": 1.731911150208929,
"grad_norm": 2.377511978149414,
"learning_rate": 8.537289138798461e-06,
"loss": 1.9787,
"step": 31500
},
{
"epoch": 1.7594018033868486,
"grad_norm": 2.74446964263916,
"learning_rate": 8.35269310446878e-06,
"loss": 1.9272,
"step": 32000
},
{
"epoch": 1.7868924565647681,
"grad_norm": 3.174065589904785,
"learning_rate": 8.16772713820657e-06,
"loss": 1.9744,
"step": 32500
},
{
"epoch": 1.8143831097426875,
"grad_norm": 3.6638684272766113,
"learning_rate": 7.982761171944364e-06,
"loss": 1.9441,
"step": 33000
},
{
"epoch": 1.841873762920607,
"grad_norm": 2.563633441925049,
"learning_rate": 7.797795205682154e-06,
"loss": 1.9422,
"step": 33500
},
{
"epoch": 1.8693644160985265,
"grad_norm": 2.817347764968872,
"learning_rate": 7.612829239419948e-06,
"loss": 1.9556,
"step": 34000
},
{
"epoch": 1.8968550692764459,
"grad_norm": 2.59664249420166,
"learning_rate": 7.428233205090264e-06,
"loss": 1.9754,
"step": 34500
},
{
"epoch": 1.9243457224543654,
"grad_norm": 2.615318775177002,
"learning_rate": 7.2432672388280566e-06,
"loss": 1.9349,
"step": 35000
},
{
"epoch": 1.951836375632285,
"grad_norm": 2.5411360263824463,
"learning_rate": 7.058301272565849e-06,
"loss": 1.9612,
"step": 35500
},
{
"epoch": 1.9793270288102045,
"grad_norm": 2.809213161468506,
"learning_rate": 6.873335306303641e-06,
"loss": 1.9433,
"step": 36000
},
{
"epoch": 2.006817681988124,
"grad_norm": 2.415309190750122,
"learning_rate": 6.688739271973957e-06,
"loss": 1.9222,
"step": 36500
},
{
"epoch": 2.0343083351660436,
"grad_norm": 2.3479654788970947,
"learning_rate": 6.5037733057117494e-06,
"loss": 1.8997,
"step": 37000
},
{
"epoch": 2.061798988343963,
"grad_norm": 2.566889762878418,
"learning_rate": 6.3188073394495416e-06,
"loss": 1.8604,
"step": 37500
},
{
"epoch": 2.0892896415218827,
"grad_norm": 2.9693121910095215,
"learning_rate": 6.133841373187334e-06,
"loss": 1.9029,
"step": 38000
},
{
"epoch": 2.1167802946998022,
"grad_norm": 3.270578145980835,
"learning_rate": 5.949245338857651e-06,
"loss": 1.8985,
"step": 38500
},
{
"epoch": 2.1442709478777218,
"grad_norm": 3.2845070362091064,
"learning_rate": 5.764279372595442e-06,
"loss": 1.8781,
"step": 39000
},
{
"epoch": 2.171761601055641,
"grad_norm": 3.1421778202056885,
"learning_rate": 5.579313406333235e-06,
"loss": 1.8677,
"step": 39500
},
{
"epoch": 2.1992522542335604,
"grad_norm": 2.8013062477111816,
"learning_rate": 5.394347440071027e-06,
"loss": 1.9078,
"step": 40000
},
{
"epoch": 2.22674290741148,
"grad_norm": 2.775585651397705,
"learning_rate": 5.209751405741345e-06,
"loss": 1.9292,
"step": 40500
},
{
"epoch": 2.2542335605893995,
"grad_norm": 2.672166585922241,
"learning_rate": 5.024785439479136e-06,
"loss": 1.8568,
"step": 41000
},
{
"epoch": 2.281724213767319,
"grad_norm": 2.4413459300994873,
"learning_rate": 4.839819473216928e-06,
"loss": 1.8813,
"step": 41500
},
{
"epoch": 2.3092148669452386,
"grad_norm": 3.139625072479248,
"learning_rate": 4.654853506954721e-06,
"loss": 1.8763,
"step": 42000
},
{
"epoch": 2.336705520123158,
"grad_norm": 2.938086986541748,
"learning_rate": 4.469887540692513e-06,
"loss": 1.9047,
"step": 42500
},
{
"epoch": 2.3641961733010777,
"grad_norm": 3.119866371154785,
"learning_rate": 4.28529150636283e-06,
"loss": 1.8985,
"step": 43000
},
{
"epoch": 2.391686826478997,
"grad_norm": 2.9585859775543213,
"learning_rate": 4.100325540100622e-06,
"loss": 1.8584,
"step": 43500
},
{
"epoch": 2.4191774796569168,
"grad_norm": 3.028029441833496,
"learning_rate": 3.915359573838414e-06,
"loss": 1.8514,
"step": 44000
},
{
"epoch": 2.4466681328348363,
"grad_norm": 2.885305643081665,
"learning_rate": 3.7303936075762065e-06,
"loss": 1.9058,
"step": 44500
},
{
"epoch": 2.474158786012756,
"grad_norm": 2.529195547103882,
"learning_rate": 3.5454276413139987e-06,
"loss": 1.899,
"step": 45000
},
{
"epoch": 2.5016494391906754,
"grad_norm": 2.8629934787750244,
"learning_rate": 3.360831606984315e-06,
"loss": 1.8854,
"step": 45500
},
{
"epoch": 2.5291400923685945,
"grad_norm": 3.1434662342071533,
"learning_rate": 3.1758656407221073e-06,
"loss": 1.8475,
"step": 46000
},
{
"epoch": 2.556630745546514,
"grad_norm": 2.7778401374816895,
"learning_rate": 2.9908996744598994e-06,
"loss": 1.893,
"step": 46500
},
{
"epoch": 2.5841213987244336,
"grad_norm": 3.024404525756836,
"learning_rate": 2.8059337081976915e-06,
"loss": 1.8792,
"step": 47000
},
{
"epoch": 2.611612051902353,
"grad_norm": 3.283930778503418,
"learning_rate": 2.6213376738680084e-06,
"loss": 1.8411,
"step": 47500
},
{
"epoch": 2.6391027050802727,
"grad_norm": 2.3540070056915283,
"learning_rate": 2.436371707605801e-06,
"loss": 1.8602,
"step": 48000
},
{
"epoch": 2.666593358258192,
"grad_norm": 2.4063596725463867,
"learning_rate": 2.251405741343593e-06,
"loss": 1.8422,
"step": 48500
},
{
"epoch": 2.6940840114361118,
"grad_norm": 2.7958314418792725,
"learning_rate": 2.0664397750813853e-06,
"loss": 1.8726,
"step": 49000
},
{
"epoch": 2.7215746646140313,
"grad_norm": 2.9718141555786133,
"learning_rate": 1.8814738088191776e-06,
"loss": 1.9078,
"step": 49500
},
{
"epoch": 2.749065317791951,
"grad_norm": 3.2070326805114746,
"learning_rate": 1.696877774489494e-06,
"loss": 1.8734,
"step": 50000
},
{
"epoch": 2.7765559709698704,
"grad_norm": 3.239140272140503,
"learning_rate": 1.5119118082272862e-06,
"loss": 1.8441,
"step": 50500
}
],
"logging_steps": 500,
"max_steps": 54564,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 1818,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.3300548304896e+16,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}