{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.330556481172943,
"eval_steps": 500,
"global_step": 25000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.16661112962345884,
"grad_norm": 2.0231502056121826,
"learning_rate": 2.776852160390981e-06,
"loss": 8.4699,
"step": 500
},
{
"epoch": 0.3332222592469177,
"grad_norm": 1.640729546546936,
"learning_rate": 5.553704320781962e-06,
"loss": 7.446,
"step": 1000
},
{
"epoch": 0.49983338887037654,
"grad_norm": 1.6770328283309937,
"learning_rate": 8.330556481172942e-06,
"loss": 6.6034,
"step": 1500
},
{
"epoch": 0.6664445184938353,
"grad_norm": 2.338146924972534,
"learning_rate": 1.1107408641563923e-05,
"loss": 5.9787,
"step": 2000
},
{
"epoch": 0.8330556481172943,
"grad_norm": 2.557213068008423,
"learning_rate": 1.3884260801954905e-05,
"loss": 5.5732,
"step": 2500
},
{
"epoch": 0.9996667777407531,
"grad_norm": 1.7546786069869995,
"learning_rate": 1.6661112962345883e-05,
"loss": 5.2881,
"step": 3000
},
{
"epoch": 1.166277907364212,
"grad_norm": 1.7593083381652832,
"learning_rate": 1.9437965122736867e-05,
"loss": 5.0771,
"step": 3500
},
{
"epoch": 1.3328890369876707,
"grad_norm": 1.860612392425537,
"learning_rate": 2.2214817283127847e-05,
"loss": 4.8991,
"step": 4000
},
{
"epoch": 1.4995001666111296,
"grad_norm": 2.036371946334839,
"learning_rate": 2.4991669443518826e-05,
"loss": 4.7465,
"step": 4500
},
{
"epoch": 1.6661112962345883,
"grad_norm": 1.8470864295959473,
"learning_rate": 2.776852160390981e-05,
"loss": 4.5925,
"step": 5000
},
{
"epoch": 1.8327224258580475,
"grad_norm": 1.9861804246902466,
"learning_rate": 3.0545373764300786e-05,
"loss": 4.4767,
"step": 5500
},
{
"epoch": 1.9993335554815062,
"grad_norm": 2.018094062805176,
"learning_rate": 3.3322225924691766e-05,
"loss": 4.3708,
"step": 6000
},
{
"epoch": 2.165944685104965,
"grad_norm": 1.8410370349884033,
"learning_rate": 3.609352438076197e-05,
"loss": 4.2741,
"step": 6500
},
{
"epoch": 2.332555814728424,
"grad_norm": 1.7712337970733643,
"learning_rate": 3.887037654115295e-05,
"loss": 4.192,
"step": 7000
},
{
"epoch": 2.4991669443518827,
"grad_norm": 1.7352423667907715,
"learning_rate": 4.164722870154393e-05,
"loss": 4.1192,
"step": 7500
},
{
"epoch": 2.6657780739753414,
"grad_norm": 1.8459163904190063,
"learning_rate": 4.442408086193491e-05,
"loss": 4.0351,
"step": 8000
},
{
"epoch": 2.8323892035988005,
"grad_norm": 2.009974956512451,
"learning_rate": 4.719537931800511e-05,
"loss": 3.9709,
"step": 8500
},
{
"epoch": 2.9990003332222592,
"grad_norm": 1.7068768739700317,
"learning_rate": 4.997223147839609e-05,
"loss": 3.899,
"step": 9000
},
{
"epoch": 3.165611462845718,
"grad_norm": 1.6234753131866455,
"learning_rate": 5.274908363878708e-05,
"loss": 3.8237,
"step": 9500
},
{
"epoch": 3.332222592469177,
"grad_norm": 1.786528468132019,
"learning_rate": 5.552593579917805e-05,
"loss": 3.7704,
"step": 10000
},
{
"epoch": 3.4988337220926358,
"grad_norm": 1.7539526224136353,
"learning_rate": 5.829723425524826e-05,
"loss": 3.7183,
"step": 10500
},
{
"epoch": 3.665444851716095,
"grad_norm": 1.6715501546859741,
"learning_rate": 6.107408641563923e-05,
"loss": 3.6671,
"step": 11000
},
{
"epoch": 3.8320559813395536,
"grad_norm": 1.7561357021331787,
"learning_rate": 6.385093857603022e-05,
"loss": 3.6214,
"step": 11500
},
{
"epoch": 3.9986671109630123,
"grad_norm": 1.5936201810836792,
"learning_rate": 6.662779073642119e-05,
"loss": 3.5782,
"step": 12000
},
{
"epoch": 4.1652782405864714,
"grad_norm": 1.7363481521606445,
"learning_rate": 6.940464289681218e-05,
"loss": 3.5291,
"step": 12500
},
{
"epoch": 4.33188937020993,
"grad_norm": 1.5552008152008057,
"learning_rate": 7.217594135288238e-05,
"loss": 3.5017,
"step": 13000
},
{
"epoch": 4.498500499833389,
"grad_norm": 1.4608962535858154,
"learning_rate": 7.495279351327336e-05,
"loss": 3.4685,
"step": 13500
},
{
"epoch": 4.665111629456848,
"grad_norm": 1.470942735671997,
"learning_rate": 7.772964567366434e-05,
"loss": 3.4407,
"step": 14000
},
{
"epoch": 4.831722759080306,
"grad_norm": 1.4744900465011597,
"learning_rate": 8.050649783405532e-05,
"loss": 3.4156,
"step": 14500
},
{
"epoch": 4.998333888703765,
"grad_norm": 1.449549913406372,
"learning_rate": 8.32833499944463e-05,
"loss": 3.3933,
"step": 15000
},
{
"epoch": 5.1649450183272245,
"grad_norm": 1.4103004932403564,
"learning_rate": 8.606020215483728e-05,
"loss": 3.355,
"step": 15500
},
{
"epoch": 5.331556147950683,
"grad_norm": 1.3490815162658691,
"learning_rate": 8.883705431522826e-05,
"loss": 3.335,
"step": 16000
},
{
"epoch": 5.498167277574142,
"grad_norm": 1.3762222528457642,
"learning_rate": 9.161390647561924e-05,
"loss": 3.3183,
"step": 16500
},
{
"epoch": 5.664778407197601,
"grad_norm": 1.3008846044540405,
"learning_rate": 9.438520493168944e-05,
"loss": 3.3013,
"step": 17000
},
{
"epoch": 5.831389536821059,
"grad_norm": 1.3132846355438232,
"learning_rate": 9.716205709208043e-05,
"loss": 3.2846,
"step": 17500
},
{
"epoch": 5.9980006664445185,
"grad_norm": 1.194407343864441,
"learning_rate": 9.99389092524714e-05,
"loss": 3.2706,
"step": 18000
},
{
"epoch": 6.164611796067978,
"grad_norm": 1.1745057106018066,
"learning_rate": 9.996657887827809e-05,
"loss": 3.2324,
"step": 18500
},
{
"epoch": 6.331222925691436,
"grad_norm": 1.1769245862960815,
"learning_rate": 9.986361454903414e-05,
"loss": 3.2237,
"step": 19000
},
{
"epoch": 6.497834055314895,
"grad_norm": 1.1529046297073364,
"learning_rate": 9.969082137309689e-05,
"loss": 3.2103,
"step": 19500
},
{
"epoch": 6.664445184938354,
"grad_norm": 1.1225526332855225,
"learning_rate": 9.944857739312099e-05,
"loss": 3.1936,
"step": 20000
},
{
"epoch": 6.831056314561812,
"grad_norm": 1.1167833805084229,
"learning_rate": 9.913722118348934e-05,
"loss": 3.1818,
"step": 20500
},
{
"epoch": 6.9976674441852715,
"grad_norm": 1.093309998512268,
"learning_rate": 9.875801618300155e-05,
"loss": 3.1733,
"step": 21000
},
{
"epoch": 7.164278573808731,
"grad_norm": 1.1539673805236816,
"learning_rate": 9.83099727084671e-05,
"loss": 3.1342,
"step": 21500
},
{
"epoch": 7.330889703432189,
"grad_norm": 1.1930997371673584,
"learning_rate": 9.779440838491111e-05,
"loss": 3.1356,
"step": 22000
},
{
"epoch": 7.497500833055648,
"grad_norm": 1.0827438831329346,
"learning_rate": 9.721204379524642e-05,
"loss": 3.1231,
"step": 22500
},
{
"epoch": 7.664111962679107,
"grad_norm": 1.0850555896759033,
"learning_rate": 9.656505485544595e-05,
"loss": 3.1126,
"step": 23000
},
{
"epoch": 7.8307230923025655,
"grad_norm": 1.0119590759277344,
"learning_rate": 9.585175299579849e-05,
"loss": 3.1049,
"step": 23500
},
{
"epoch": 7.997334221926025,
"grad_norm": 1.0027412176132202,
"learning_rate": 9.507436603906176e-05,
"loss": 3.1004,
"step": 24000
},
{
"epoch": 8.163945351549483,
"grad_norm": 1.0899256467819214,
"learning_rate": 9.423398050680459e-05,
"loss": 3.0591,
"step": 24500
},
{
"epoch": 8.330556481172943,
"grad_norm": 1.0424436330795288,
"learning_rate": 9.333363626660517e-05,
"loss": 3.0582,
"step": 25000
}
],
"logging_steps": 500,
"max_steps": 60020,
"num_input_tokens_seen": 0,
"num_train_epochs": 20,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.046659236626432e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}