{
"best_global_step": 9000,
"best_metric": 0.5520676374435425,
"best_model_checkpoint": "/home/work/.workspace/exaone_RL_test/outputs/checkpoint-9000",
"epoch": 2.0,
"eval_steps": 1000,
"global_step": 9766,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0020479213598197828,
"grad_norm": 154.0,
"learning_rate": 3.071672354948806e-07,
"loss": 5.2426,
"step": 10
},
{
"epoch": 0.0040958427196395655,
"grad_norm": 134.0,
"learning_rate": 6.484641638225256e-07,
"loss": 5.0177,
"step": 20
},
{
"epoch": 0.006143764079459349,
"grad_norm": 101.0,
"learning_rate": 9.897610921501708e-07,
"loss": 4.6525,
"step": 30
},
{
"epoch": 0.008191685439279131,
"grad_norm": 64.0,
"learning_rate": 1.331058020477816e-06,
"loss": 3.8346,
"step": 40
},
{
"epoch": 0.010239606799098914,
"grad_norm": 44.0,
"learning_rate": 1.6723549488054607e-06,
"loss": 2.898,
"step": 50
},
{
"epoch": 0.012287528158918698,
"grad_norm": 35.75,
"learning_rate": 2.013651877133106e-06,
"loss": 1.9498,
"step": 60
},
{
"epoch": 0.014335449518738481,
"grad_norm": 29.375,
"learning_rate": 2.354948805460751e-06,
"loss": 1.3878,
"step": 70
},
{
"epoch": 0.016383370878558262,
"grad_norm": 10.6875,
"learning_rate": 2.696245733788396e-06,
"loss": 1.1501,
"step": 80
},
{
"epoch": 0.018431292238378045,
"grad_norm": 9.1875,
"learning_rate": 3.0375426621160415e-06,
"loss": 1.0169,
"step": 90
},
{
"epoch": 0.020479213598197828,
"grad_norm": 9.6875,
"learning_rate": 3.378839590443686e-06,
"loss": 0.8679,
"step": 100
},
{
"epoch": 0.02252713495801761,
"grad_norm": 7.59375,
"learning_rate": 3.7201365187713314e-06,
"loss": 0.8122,
"step": 110
},
{
"epoch": 0.024575056317837397,
"grad_norm": 8.25,
"learning_rate": 4.061433447098976e-06,
"loss": 0.7636,
"step": 120
},
{
"epoch": 0.02662297767765718,
"grad_norm": 7.9375,
"learning_rate": 4.402730375426622e-06,
"loss": 0.7755,
"step": 130
},
{
"epoch": 0.028670899037476962,
"grad_norm": 7.90625,
"learning_rate": 4.744027303754267e-06,
"loss": 0.749,
"step": 140
},
{
"epoch": 0.030718820397296745,
"grad_norm": 8.9375,
"learning_rate": 5.0853242320819115e-06,
"loss": 0.7375,
"step": 150
},
{
"epoch": 0.032766741757116524,
"grad_norm": 7.25,
"learning_rate": 5.426621160409556e-06,
"loss": 0.7443,
"step": 160
},
{
"epoch": 0.03481466311693631,
"grad_norm": 7.53125,
"learning_rate": 5.767918088737202e-06,
"loss": 0.7866,
"step": 170
},
{
"epoch": 0.03686258447675609,
"grad_norm": 6.5,
"learning_rate": 6.109215017064847e-06,
"loss": 0.719,
"step": 180
},
{
"epoch": 0.038910505836575876,
"grad_norm": 6.125,
"learning_rate": 6.450511945392492e-06,
"loss": 0.6936,
"step": 190
},
{
"epoch": 0.040958427196395655,
"grad_norm": 5.75,
"learning_rate": 6.7918088737201375e-06,
"loss": 0.7347,
"step": 200
},
{
"epoch": 0.04300634855621544,
"grad_norm": 5.96875,
"learning_rate": 7.133105802047782e-06,
"loss": 0.6707,
"step": 210
},
{
"epoch": 0.04505426991603522,
"grad_norm": 7.9375,
"learning_rate": 7.474402730375427e-06,
"loss": 0.6804,
"step": 220
},
{
"epoch": 0.04710219127585501,
"grad_norm": 5.71875,
"learning_rate": 7.815699658703072e-06,
"loss": 0.6831,
"step": 230
},
{
"epoch": 0.04915011263567479,
"grad_norm": 5.40625,
"learning_rate": 8.156996587030718e-06,
"loss": 0.634,
"step": 240
},
{
"epoch": 0.05119803399549457,
"grad_norm": 5.5625,
"learning_rate": 8.498293515358363e-06,
"loss": 0.6739,
"step": 250
},
{
"epoch": 0.05324595535531436,
"grad_norm": 4.4375,
"learning_rate": 8.839590443686009e-06,
"loss": 0.65,
"step": 260
},
{
"epoch": 0.05529387671513414,
"grad_norm": 5.5625,
"learning_rate": 9.180887372013653e-06,
"loss": 0.6803,
"step": 270
},
{
"epoch": 0.057341798074953924,
"grad_norm": 5.0625,
"learning_rate": 9.522184300341298e-06,
"loss": 0.7035,
"step": 280
},
{
"epoch": 0.059389719434773704,
"grad_norm": 5.875,
"learning_rate": 9.863481228668942e-06,
"loss": 0.7119,
"step": 290
},
{
"epoch": 0.06143764079459349,
"grad_norm": 6.4375,
"learning_rate": 9.999990101553574e-06,
"loss": 0.7058,
"step": 300
},
{
"epoch": 0.06348556215441327,
"grad_norm": 5.34375,
"learning_rate": 9.999929611189557e-06,
"loss": 0.6344,
"step": 310
},
{
"epoch": 0.06553348351423305,
"grad_norm": 3.6875,
"learning_rate": 9.999814130262912e-06,
"loss": 0.6832,
"step": 320
},
{
"epoch": 0.06758140487405284,
"grad_norm": 4.375,
"learning_rate": 9.999643660043727e-06,
"loss": 0.6186,
"step": 330
},
{
"epoch": 0.06962932623387262,
"grad_norm": 4.90625,
"learning_rate": 9.999418202406882e-06,
"loss": 0.7257,
"step": 340
},
{
"epoch": 0.0716772475936924,
"grad_norm": 5.0,
"learning_rate": 9.999137759832017e-06,
"loss": 0.6864,
"step": 350
},
{
"epoch": 0.07372516895351218,
"grad_norm": 4.53125,
"learning_rate": 9.998802335403514e-06,
"loss": 0.6495,
"step": 360
},
{
"epoch": 0.07577309031333197,
"grad_norm": 4.875,
"learning_rate": 9.998411932810465e-06,
"loss": 0.6305,
"step": 370
},
{
"epoch": 0.07782101167315175,
"grad_norm": 4.25,
"learning_rate": 9.997966556346617e-06,
"loss": 0.6956,
"step": 380
},
{
"epoch": 0.07986893303297153,
"grad_norm": 4.34375,
"learning_rate": 9.997466210910344e-06,
"loss": 0.5943,
"step": 390
},
{
"epoch": 0.08191685439279131,
"grad_norm": 6.71875,
"learning_rate": 9.996910902004576e-06,
"loss": 0.6226,
"step": 400
},
{
"epoch": 0.0839647757526111,
"grad_norm": 4.8125,
"learning_rate": 9.99630063573675e-06,
"loss": 0.6206,
"step": 410
},
{
"epoch": 0.08601269711243088,
"grad_norm": 4.625,
"learning_rate": 9.995635418818734e-06,
"loss": 0.6269,
"step": 420
},
{
"epoch": 0.08806061847225066,
"grad_norm": 4.5625,
"learning_rate": 9.994915258566766e-06,
"loss": 0.5965,
"step": 430
},
{
"epoch": 0.09010853983207044,
"grad_norm": 4.5625,
"learning_rate": 9.994140162901362e-06,
"loss": 0.6204,
"step": 440
},
{
"epoch": 0.09215646119189023,
"grad_norm": 4.40625,
"learning_rate": 9.993310140347227e-06,
"loss": 0.687,
"step": 450
},
{
"epoch": 0.09420438255171001,
"grad_norm": 4.625,
"learning_rate": 9.992425200033173e-06,
"loss": 0.5611,
"step": 460
},
{
"epoch": 0.0962523039115298,
"grad_norm": 4.21875,
"learning_rate": 9.99148535169201e-06,
"loss": 0.6094,
"step": 470
},
{
"epoch": 0.09830022527134959,
"grad_norm": 4.90625,
"learning_rate": 9.990490605660441e-06,
"loss": 0.6283,
"step": 480
},
{
"epoch": 0.10034814663116937,
"grad_norm": 4.96875,
"learning_rate": 9.98944097287895e-06,
"loss": 0.6276,
"step": 490
},
{
"epoch": 0.10239606799098915,
"grad_norm": 4.65625,
"learning_rate": 9.988336464891677e-06,
"loss": 0.6535,
"step": 500
},
{
"epoch": 0.10444398935080892,
"grad_norm": 5.03125,
"learning_rate": 9.987177093846299e-06,
"loss": 0.6377,
"step": 510
},
{
"epoch": 0.10649191071062872,
"grad_norm": 3.71875,
"learning_rate": 9.985962872493885e-06,
"loss": 0.6542,
"step": 520
},
{
"epoch": 0.1085398320704485,
"grad_norm": 4.40625,
"learning_rate": 9.984693814188769e-06,
"loss": 0.6088,
"step": 530
},
{
"epoch": 0.11058775343026828,
"grad_norm": 4.34375,
"learning_rate": 9.983369932888393e-06,
"loss": 0.6549,
"step": 540
},
{
"epoch": 0.11263567479008806,
"grad_norm": 3.75,
"learning_rate": 9.981991243153156e-06,
"loss": 0.5653,
"step": 550
},
{
"epoch": 0.11468359614990785,
"grad_norm": 3.484375,
"learning_rate": 9.980557760146259e-06,
"loss": 0.6158,
"step": 560
},
{
"epoch": 0.11673151750972763,
"grad_norm": 3.875,
"learning_rate": 9.979069499633528e-06,
"loss": 0.6301,
"step": 570
},
{
"epoch": 0.11877943886954741,
"grad_norm": 3.375,
"learning_rate": 9.977526477983252e-06,
"loss": 0.5643,
"step": 580
},
{
"epoch": 0.12082736022936719,
"grad_norm": 4.21875,
"learning_rate": 9.975928712165997e-06,
"loss": 0.6924,
"step": 590
},
{
"epoch": 0.12287528158918698,
"grad_norm": 4.6875,
"learning_rate": 9.974276219754416e-06,
"loss": 0.6238,
"step": 600
},
{
"epoch": 0.12492320294900676,
"grad_norm": 4.34375,
"learning_rate": 9.972569018923064e-06,
"loss": 0.6142,
"step": 610
},
{
"epoch": 0.12697112430882654,
"grad_norm": 4.03125,
"learning_rate": 9.97080712844819e-06,
"loss": 0.5602,
"step": 620
},
{
"epoch": 0.12901904566864633,
"grad_norm": 4.96875,
"learning_rate": 9.96899056770754e-06,
"loss": 0.5933,
"step": 630
},
{
"epoch": 0.1310669670284661,
"grad_norm": 4.34375,
"learning_rate": 9.967119356680131e-06,
"loss": 0.591,
"step": 640
},
{
"epoch": 0.1331148883882859,
"grad_norm": 4.46875,
"learning_rate": 9.965193515946045e-06,
"loss": 0.594,
"step": 650
},
{
"epoch": 0.13516280974810568,
"grad_norm": 4.40625,
"learning_rate": 9.963213066686193e-06,
"loss": 0.6271,
"step": 660
},
{
"epoch": 0.13721073110792545,
"grad_norm": 4.4375,
"learning_rate": 9.961178030682083e-06,
"loss": 0.6486,
"step": 670
},
{
"epoch": 0.13925865246774524,
"grad_norm": 4.25,
"learning_rate": 9.959088430315587e-06,
"loss": 0.641,
"step": 680
},
{
"epoch": 0.14130657382756504,
"grad_norm": 4.375,
"learning_rate": 9.956944288568689e-06,
"loss": 0.6058,
"step": 690
},
{
"epoch": 0.1433544951873848,
"grad_norm": 4.3125,
"learning_rate": 9.95474562902323e-06,
"loss": 0.6139,
"step": 700
},
{
"epoch": 0.1454024165472046,
"grad_norm": 4.53125,
"learning_rate": 9.952492475860662e-06,
"loss": 0.5889,
"step": 710
},
{
"epoch": 0.14745033790702436,
"grad_norm": 3.796875,
"learning_rate": 9.95018485386176e-06,
"loss": 0.5904,
"step": 720
},
{
"epoch": 0.14949825926684415,
"grad_norm": 4.21875,
"learning_rate": 9.947822788406367e-06,
"loss": 0.6628,
"step": 730
},
{
"epoch": 0.15154618062666395,
"grad_norm": 3.59375,
"learning_rate": 9.945406305473116e-06,
"loss": 0.555,
"step": 740
},
{
"epoch": 0.1535941019864837,
"grad_norm": 4.3125,
"learning_rate": 9.942935431639128e-06,
"loss": 0.6128,
"step": 750
},
{
"epoch": 0.1556420233463035,
"grad_norm": 4.1875,
"learning_rate": 9.940410194079736e-06,
"loss": 0.5984,
"step": 760
},
{
"epoch": 0.1576899447061233,
"grad_norm": 3.53125,
"learning_rate": 9.937830620568176e-06,
"loss": 0.6214,
"step": 770
},
{
"epoch": 0.15973786606594306,
"grad_norm": 3.671875,
"learning_rate": 9.935196739475287e-06,
"loss": 0.5355,
"step": 780
},
{
"epoch": 0.16178578742576286,
"grad_norm": 4.5625,
"learning_rate": 9.932508579769203e-06,
"loss": 0.6303,
"step": 790
},
{
"epoch": 0.16383370878558262,
"grad_norm": 3.9375,
"learning_rate": 9.929766171015015e-06,
"loss": 0.5743,
"step": 800
},
{
"epoch": 0.16588163014540241,
"grad_norm": 3.625,
"learning_rate": 9.926969543374473e-06,
"loss": 0.5829,
"step": 810
},
{
"epoch": 0.1679295515052222,
"grad_norm": 4.75,
"learning_rate": 9.924118727605633e-06,
"loss": 0.6498,
"step": 820
},
{
"epoch": 0.16997747286504197,
"grad_norm": 4.21875,
"learning_rate": 9.921213755062531e-06,
"loss": 0.6007,
"step": 830
},
{
"epoch": 0.17202539422486177,
"grad_norm": 4.46875,
"learning_rate": 9.918254657694831e-06,
"loss": 0.639,
"step": 840
},
{
"epoch": 0.17407331558468156,
"grad_norm": 3.28125,
"learning_rate": 9.915241468047476e-06,
"loss": 0.5943,
"step": 850
},
{
"epoch": 0.17612123694450132,
"grad_norm": 3.65625,
"learning_rate": 9.912174219260332e-06,
"loss": 0.5616,
"step": 860
},
{
"epoch": 0.17816915830432112,
"grad_norm": 4.84375,
"learning_rate": 9.90905294506782e-06,
"loss": 0.5864,
"step": 870
},
{
"epoch": 0.18021707966414088,
"grad_norm": 3.796875,
"learning_rate": 9.905877679798552e-06,
"loss": 0.6337,
"step": 880
},
{
"epoch": 0.18226500102396068,
"grad_norm": 4.25,
"learning_rate": 9.90264845837494e-06,
"loss": 0.6289,
"step": 890
},
{
"epoch": 0.18431292238378047,
"grad_norm": 3.328125,
"learning_rate": 9.899365316312826e-06,
"loss": 0.5829,
"step": 900
},
{
"epoch": 0.18636084374360024,
"grad_norm": 4.90625,
"learning_rate": 9.896028289721085e-06,
"loss": 0.6398,
"step": 910
},
{
"epoch": 0.18840876510342003,
"grad_norm": 3.765625,
"learning_rate": 9.892637415301227e-06,
"loss": 0.5661,
"step": 920
},
{
"epoch": 0.19045668646323982,
"grad_norm": 3.765625,
"learning_rate": 9.889192730346994e-06,
"loss": 0.6144,
"step": 930
},
{
"epoch": 0.1925046078230596,
"grad_norm": 4.375,
"learning_rate": 9.885694272743955e-06,
"loss": 0.5456,
"step": 940
},
{
"epoch": 0.19455252918287938,
"grad_norm": 5.53125,
"learning_rate": 9.882142080969082e-06,
"loss": 0.6665,
"step": 950
},
{
"epoch": 0.19660045054269917,
"grad_norm": 3.515625,
"learning_rate": 9.878536194090326e-06,
"loss": 0.6107,
"step": 960
},
{
"epoch": 0.19864837190251894,
"grad_norm": 3.359375,
"learning_rate": 9.874876651766198e-06,
"loss": 0.5805,
"step": 970
},
{
"epoch": 0.20069629326233873,
"grad_norm": 3.984375,
"learning_rate": 9.871163494245324e-06,
"loss": 0.6071,
"step": 980
},
{
"epoch": 0.2027442146221585,
"grad_norm": 4.25,
"learning_rate": 9.867396762366e-06,
"loss": 0.6215,
"step": 990
},
{
"epoch": 0.2047921359819783,
"grad_norm": 5.1875,
"learning_rate": 9.863576497555752e-06,
"loss": 0.6455,
"step": 1000
},
{
"epoch": 0.2047921359819783,
"eval_loss": 0.5988336801528931,
"eval_runtime": 49.5583,
"eval_samples_per_second": 82.973,
"eval_steps_per_second": 41.487,
"step": 1000
},
{
"epoch": 0.20684005734179808,
"grad_norm": 3.609375,
"learning_rate": 9.859702741830873e-06,
"loss": 0.546,
"step": 1010
},
{
"epoch": 0.20888797870161785,
"grad_norm": 4.125,
"learning_rate": 9.855775537795965e-06,
"loss": 0.6586,
"step": 1020
},
{
"epoch": 0.21093590006143764,
"grad_norm": 4.625,
"learning_rate": 9.851794928643465e-06,
"loss": 0.6279,
"step": 1030
},
{
"epoch": 0.21298382142125744,
"grad_norm": 3.296875,
"learning_rate": 9.847760958153183e-06,
"loss": 0.5474,
"step": 1040
},
{
"epoch": 0.2150317427810772,
"grad_norm": 3.640625,
"learning_rate": 9.8436736706918e-06,
"loss": 0.58,
"step": 1050
},
{
"epoch": 0.217079664140897,
"grad_norm": 3.515625,
"learning_rate": 9.839533111212395e-06,
"loss": 0.6043,
"step": 1060
},
{
"epoch": 0.21912758550071676,
"grad_norm": 4.3125,
"learning_rate": 9.835339325253954e-06,
"loss": 0.6341,
"step": 1070
},
{
"epoch": 0.22117550686053655,
"grad_norm": 4.0625,
"learning_rate": 9.831092358940853e-06,
"loss": 0.5638,
"step": 1080
},
{
"epoch": 0.22322342822035635,
"grad_norm": 3.3125,
"learning_rate": 9.82679225898236e-06,
"loss": 0.6114,
"step": 1090
},
{
"epoch": 0.2252713495801761,
"grad_norm": 4.90625,
"learning_rate": 9.82243907267213e-06,
"loss": 0.6526,
"step": 1100
},
{
"epoch": 0.2273192709399959,
"grad_norm": 4.625,
"learning_rate": 9.818032847887664e-06,
"loss": 0.5896,
"step": 1110
},
{
"epoch": 0.2293671922998157,
"grad_norm": 3.453125,
"learning_rate": 9.813573633089804e-06,
"loss": 0.5732,
"step": 1120
},
{
"epoch": 0.23141511365963546,
"grad_norm": 3.71875,
"learning_rate": 9.809061477322186e-06,
"loss": 0.6223,
"step": 1130
},
{
"epoch": 0.23346303501945526,
"grad_norm": 4.21875,
"learning_rate": 9.804496430210704e-06,
"loss": 0.6012,
"step": 1140
},
{
"epoch": 0.23551095637927505,
"grad_norm": 4.0,
"learning_rate": 9.799878541962968e-06,
"loss": 0.6064,
"step": 1150
},
{
"epoch": 0.23755887773909481,
"grad_norm": 3.828125,
"learning_rate": 9.795207863367745e-06,
"loss": 0.553,
"step": 1160
},
{
"epoch": 0.2396067990989146,
"grad_norm": 4.53125,
"learning_rate": 9.790484445794413e-06,
"loss": 0.6526,
"step": 1170
},
{
"epoch": 0.24165472045873437,
"grad_norm": 4.25,
"learning_rate": 9.785708341192375e-06,
"loss": 0.6117,
"step": 1180
},
{
"epoch": 0.24370264181855417,
"grad_norm": 3.828125,
"learning_rate": 9.78087960209051e-06,
"loss": 0.5773,
"step": 1190
},
{
"epoch": 0.24575056317837396,
"grad_norm": 4.71875,
"learning_rate": 9.77599828159658e-06,
"loss": 0.6004,
"step": 1200
},
{
"epoch": 0.24779848453819373,
"grad_norm": 4.40625,
"learning_rate": 9.771064433396652e-06,
"loss": 0.5644,
"step": 1210
},
{
"epoch": 0.24984640589801352,
"grad_norm": 3.75,
"learning_rate": 9.76607811175451e-06,
"loss": 0.5899,
"step": 1220
},
{
"epoch": 0.2518943272578333,
"grad_norm": 4.78125,
"learning_rate": 9.761039371511051e-06,
"loss": 0.5757,
"step": 1230
},
{
"epoch": 0.2539422486176531,
"grad_norm": 3.6875,
"learning_rate": 9.755948268083689e-06,
"loss": 0.5837,
"step": 1240
},
{
"epoch": 0.25599016997747287,
"grad_norm": 4.21875,
"learning_rate": 9.750804857465736e-06,
"loss": 0.5767,
"step": 1250
},
{
"epoch": 0.25803809133729266,
"grad_norm": 3.0625,
"learning_rate": 9.745609196225804e-06,
"loss": 0.6256,
"step": 1260
},
{
"epoch": 0.26008601269711246,
"grad_norm": 3.9375,
"learning_rate": 9.740361341507158e-06,
"loss": 0.5644,
"step": 1270
},
{
"epoch": 0.2621339340569322,
"grad_norm": 3.859375,
"learning_rate": 9.735061351027114e-06,
"loss": 0.6039,
"step": 1280
},
{
"epoch": 0.264181855416752,
"grad_norm": 3.75,
"learning_rate": 9.729709283076375e-06,
"loss": 0.5902,
"step": 1290
},
{
"epoch": 0.2662297767765718,
"grad_norm": 3.921875,
"learning_rate": 9.724305196518425e-06,
"loss": 0.5738,
"step": 1300
},
{
"epoch": 0.2682776981363916,
"grad_norm": 3.75,
"learning_rate": 9.718849150788848e-06,
"loss": 0.5882,
"step": 1310
},
{
"epoch": 0.27032561949621137,
"grad_norm": 3.65625,
"learning_rate": 9.713341205894691e-06,
"loss": 0.5549,
"step": 1320
},
{
"epoch": 0.2723735408560311,
"grad_norm": 3.578125,
"learning_rate": 9.707781422413811e-06,
"loss": 0.5866,
"step": 1330
},
{
"epoch": 0.2744214622158509,
"grad_norm": 3.671875,
"learning_rate": 9.702169861494189e-06,
"loss": 0.6095,
"step": 1340
},
{
"epoch": 0.2764693835756707,
"grad_norm": 3.890625,
"learning_rate": 9.696506584853271e-06,
"loss": 0.5493,
"step": 1350
},
{
"epoch": 0.2785173049354905,
"grad_norm": 4.125,
"learning_rate": 9.690791654777293e-06,
"loss": 0.6172,
"step": 1360
},
{
"epoch": 0.2805652262953103,
"grad_norm": 3.046875,
"learning_rate": 9.685025134120577e-06,
"loss": 0.5721,
"step": 1370
},
{
"epoch": 0.28261314765513007,
"grad_norm": 4.84375,
"learning_rate": 9.679207086304864e-06,
"loss": 0.5712,
"step": 1380
},
{
"epoch": 0.2846610690149498,
"grad_norm": 3.890625,
"learning_rate": 9.673337575318597e-06,
"loss": 0.6429,
"step": 1390
},
{
"epoch": 0.2867089903747696,
"grad_norm": 4.3125,
"learning_rate": 9.66741666571623e-06,
"loss": 0.6728,
"step": 1400
},
{
"epoch": 0.2887569117345894,
"grad_norm": 4.375,
"learning_rate": 9.661444422617505e-06,
"loss": 0.6315,
"step": 1410
},
{
"epoch": 0.2908048330944092,
"grad_norm": 4.3125,
"learning_rate": 9.655420911706752e-06,
"loss": 0.5544,
"step": 1420
},
{
"epoch": 0.292852754454229,
"grad_norm": 3.796875,
"learning_rate": 9.649346199232154e-06,
"loss": 0.5458,
"step": 1430
},
{
"epoch": 0.2949006758140487,
"grad_norm": 4.28125,
"learning_rate": 9.643220352005023e-06,
"loss": 0.5573,
"step": 1440
},
{
"epoch": 0.2969485971738685,
"grad_norm": 3.75,
"learning_rate": 9.637043437399063e-06,
"loss": 0.6293,
"step": 1450
},
{
"epoch": 0.2989965185336883,
"grad_norm": 3.296875,
"learning_rate": 9.630815523349636e-06,
"loss": 0.5829,
"step": 1460
},
{
"epoch": 0.3010444398935081,
"grad_norm": 3.46875,
"learning_rate": 9.624536678352998e-06,
"loss": 0.5512,
"step": 1470
},
{
"epoch": 0.3030923612533279,
"grad_norm": 3.65625,
"learning_rate": 9.618206971465574e-06,
"loss": 0.5756,
"step": 1480
},
{
"epoch": 0.30514028261314763,
"grad_norm": 3.5625,
"learning_rate": 9.611826472303165e-06,
"loss": 0.5815,
"step": 1490
},
{
"epoch": 0.3071882039729674,
"grad_norm": 3.515625,
"learning_rate": 9.605395251040212e-06,
"loss": 0.575,
"step": 1500
},
{
"epoch": 0.3092361253327872,
"grad_norm": 3.484375,
"learning_rate": 9.598913378409006e-06,
"loss": 0.661,
"step": 1510
},
{
"epoch": 0.311284046692607,
"grad_norm": 3.21875,
"learning_rate": 9.592380925698911e-06,
"loss": 0.5835,
"step": 1520
},
{
"epoch": 0.3133319680524268,
"grad_norm": 4.53125,
"learning_rate": 9.585797964755593e-06,
"loss": 0.5867,
"step": 1530
},
{
"epoch": 0.3153798894122466,
"grad_norm": 3.875,
"learning_rate": 9.579164567980212e-06,
"loss": 0.5765,
"step": 1540
},
{
"epoch": 0.31742781077206633,
"grad_norm": 3.328125,
"learning_rate": 9.572480808328643e-06,
"loss": 0.5486,
"step": 1550
},
{
"epoch": 0.3194757321318861,
"grad_norm": 3.65625,
"learning_rate": 9.565746759310659e-06,
"loss": 0.6179,
"step": 1560
},
{
"epoch": 0.3215236534917059,
"grad_norm": 4.28125,
"learning_rate": 9.558962494989133e-06,
"loss": 0.6122,
"step": 1570
},
{
"epoch": 0.3235715748515257,
"grad_norm": 4.0625,
"learning_rate": 9.552128089979215e-06,
"loss": 0.5539,
"step": 1580
},
{
"epoch": 0.3256194962113455,
"grad_norm": 3.84375,
"learning_rate": 9.545243619447524e-06,
"loss": 0.5897,
"step": 1590
},
{
"epoch": 0.32766741757116524,
"grad_norm": 4.15625,
"learning_rate": 9.538309159111303e-06,
"loss": 0.5231,
"step": 1600
},
{
"epoch": 0.32971533893098504,
"grad_norm": 4.15625,
"learning_rate": 9.531324785237603e-06,
"loss": 0.6409,
"step": 1610
},
{
"epoch": 0.33176326029080483,
"grad_norm": 3.109375,
"learning_rate": 9.524290574642432e-06,
"loss": 0.5647,
"step": 1620
},
{
"epoch": 0.3338111816506246,
"grad_norm": 3.3125,
"learning_rate": 9.517206604689924e-06,
"loss": 0.6115,
"step": 1630
},
{
"epoch": 0.3358591030104444,
"grad_norm": 4.21875,
"learning_rate": 9.51007295329147e-06,
"loss": 0.5784,
"step": 1640
},
{
"epoch": 0.3379070243702642,
"grad_norm": 3.125,
"learning_rate": 9.502889698904877e-06,
"loss": 0.5442,
"step": 1650
},
{
"epoch": 0.33995494573008395,
"grad_norm": 3.8125,
"learning_rate": 9.495656920533499e-06,
"loss": 0.5771,
"step": 1660
},
{
"epoch": 0.34200286708990374,
"grad_norm": 3.78125,
"learning_rate": 9.488374697725361e-06,
"loss": 0.5675,
"step": 1670
},
{
"epoch": 0.34405078844972353,
"grad_norm": 4.1875,
"learning_rate": 9.481043110572301e-06,
"loss": 0.6298,
"step": 1680
},
{
"epoch": 0.3460987098095433,
"grad_norm": 5.0625,
"learning_rate": 9.473662239709074e-06,
"loss": 0.6504,
"step": 1690
},
{
"epoch": 0.3481466311693631,
"grad_norm": 3.828125,
"learning_rate": 9.46623216631247e-06,
"loss": 0.606,
"step": 1700
},
{
"epoch": 0.35019455252918286,
"grad_norm": 3.625,
"learning_rate": 9.458752972100425e-06,
"loss": 0.6153,
"step": 1710
},
{
"epoch": 0.35224247388900265,
"grad_norm": 3.484375,
"learning_rate": 9.451224739331113e-06,
"loss": 0.5412,
"step": 1720
},
{
"epoch": 0.35429039524882244,
"grad_norm": 3.5,
"learning_rate": 9.443647550802052e-06,
"loss": 0.5356,
"step": 1730
},
{
"epoch": 0.35633831660864224,
"grad_norm": 5.1875,
"learning_rate": 9.436021489849189e-06,
"loss": 0.6106,
"step": 1740
},
{
"epoch": 0.35838623796846203,
"grad_norm": 3.921875,
"learning_rate": 9.42834664034598e-06,
"loss": 0.5864,
"step": 1750
},
{
"epoch": 0.36043415932828177,
"grad_norm": 4.03125,
"learning_rate": 9.420623086702469e-06,
"loss": 0.5616,
"step": 1760
},
{
"epoch": 0.36248208068810156,
"grad_norm": 3.4375,
"learning_rate": 9.41285091386437e-06,
"loss": 0.5831,
"step": 1770
},
{
"epoch": 0.36453000204792135,
"grad_norm": 3.640625,
"learning_rate": 9.405030207312113e-06,
"loss": 0.5746,
"step": 1780
},
{
"epoch": 0.36657792340774115,
"grad_norm": 3.5,
"learning_rate": 9.397161053059923e-06,
"loss": 0.5429,
"step": 1790
},
{
"epoch": 0.36862584476756094,
"grad_norm": 2.953125,
"learning_rate": 9.389243537654857e-06,
"loss": 0.5584,
"step": 1800
},
{
"epoch": 0.37067376612738073,
"grad_norm": 3.90625,
"learning_rate": 9.381277748175872e-06,
"loss": 0.6123,
"step": 1810
},
{
"epoch": 0.37272168748720047,
"grad_norm": 4.25,
"learning_rate": 9.373263772232848e-06,
"loss": 0.597,
"step": 1820
},
{
"epoch": 0.37476960884702026,
"grad_norm": 4.0625,
"learning_rate": 9.365201697965634e-06,
"loss": 0.5578,
"step": 1830
},
{
"epoch": 0.37681753020684006,
"grad_norm": 3.890625,
"learning_rate": 9.357091614043078e-06,
"loss": 0.6015,
"step": 1840
},
{
"epoch": 0.37886545156665985,
"grad_norm": 3.515625,
"learning_rate": 9.348933609662055e-06,
"loss": 0.5694,
"step": 1850
},
{
"epoch": 0.38091337292647964,
"grad_norm": 3.453125,
"learning_rate": 9.340727774546476e-06,
"loss": 0.5754,
"step": 1860
},
{
"epoch": 0.3829612942862994,
"grad_norm": 3.609375,
"learning_rate": 9.332474198946308e-06,
"loss": 0.5598,
"step": 1870
},
{
"epoch": 0.3850092156461192,
"grad_norm": 3.3125,
"learning_rate": 9.324172973636583e-06,
"loss": 0.5535,
"step": 1880
},
{
"epoch": 0.38705713700593897,
"grad_norm": 3.890625,
"learning_rate": 9.315824189916398e-06,
"loss": 0.5996,
"step": 1890
},
{
"epoch": 0.38910505836575876,
"grad_norm": 4.25,
"learning_rate": 9.307427939607906e-06,
"loss": 0.5701,
"step": 1900
},
{
"epoch": 0.39115297972557855,
"grad_norm": 3.53125,
"learning_rate": 9.298984315055316e-06,
"loss": 0.5873,
"step": 1910
},
{
"epoch": 0.39320090108539835,
"grad_norm": 3.515625,
"learning_rate": 9.290493409123864e-06,
"loss": 0.5693,
"step": 1920
},
{
"epoch": 0.3952488224452181,
"grad_norm": 3.84375,
"learning_rate": 9.281955315198805e-06,
"loss": 0.5408,
"step": 1930
},
{
"epoch": 0.3972967438050379,
"grad_norm": 3.203125,
"learning_rate": 9.273370127184382e-06,
"loss": 0.582,
"step": 1940
},
{
"epoch": 0.39934466516485767,
"grad_norm": 3.28125,
"learning_rate": 9.264737939502784e-06,
"loss": 0.5886,
"step": 1950
},
{
"epoch": 0.40139258652467746,
"grad_norm": 4.34375,
"learning_rate": 9.256058847093122e-06,
"loss": 0.5457,
"step": 1960
},
{
"epoch": 0.40344050788449726,
"grad_norm": 4.0,
"learning_rate": 9.24733294541037e-06,
"loss": 0.5653,
"step": 1970
},
{
"epoch": 0.405488429244317,
"grad_norm": 4.03125,
"learning_rate": 9.238560330424333e-06,
"loss": 0.6059,
"step": 1980
},
{
"epoch": 0.4075363506041368,
"grad_norm": 4.1875,
"learning_rate": 9.229741098618573e-06,
"loss": 0.5621,
"step": 1990
},
{
"epoch": 0.4095842719639566,
"grad_norm": 3.578125,
"learning_rate": 9.220875346989363e-06,
"loss": 0.5432,
"step": 2000
},
{
"epoch": 0.4095842719639566,
"eval_loss": 0.5784266591072083,
"eval_runtime": 49.5386,
"eval_samples_per_second": 83.006,
"eval_steps_per_second": 41.503,
"step": 2000
},
{
"epoch": 0.4116321933237764,
"grad_norm": 3.09375,
"learning_rate": 9.211963173044604e-06,
"loss": 0.5743,
"step": 2010
},
{
"epoch": 0.41368011468359617,
"grad_norm": 4.1875,
"learning_rate": 9.203004674802774e-06,
"loss": 0.613,
"step": 2020
},
{
"epoch": 0.41572803604341596,
"grad_norm": 3.4375,
"learning_rate": 9.193999950791825e-06,
"loss": 0.5318,
"step": 2030
},
{
"epoch": 0.4177759574032357,
"grad_norm": 3.1875,
"learning_rate": 9.184949100048123e-06,
"loss": 0.5718,
"step": 2040
},
{
"epoch": 0.4198238787630555,
"grad_norm": 3.578125,
"learning_rate": 9.175852222115346e-06,
"loss": 0.5876,
"step": 2050
},
{
"epoch": 0.4218718001228753,
"grad_norm": 4.15625,
"learning_rate": 9.166709417043386e-06,
"loss": 0.5436,
"step": 2060
},
{
"epoch": 0.4239197214826951,
"grad_norm": 4.21875,
"learning_rate": 9.15752078538726e-06,
"loss": 0.5473,
"step": 2070
},
{
"epoch": 0.42596764284251487,
"grad_norm": 3.5,
"learning_rate": 9.148286428205994e-06,
"loss": 0.6101,
"step": 2080
},
{
"epoch": 0.4280155642023346,
"grad_norm": 4.625,
"learning_rate": 9.139006447061518e-06,
"loss": 0.5197,
"step": 2090
},
{
"epoch": 0.4300634855621544,
"grad_norm": 4.25,
"learning_rate": 9.129680944017544e-06,
"loss": 0.5946,
"step": 2100
},
{
"epoch": 0.4321114069219742,
"grad_norm": 4.5,
"learning_rate": 9.120310021638451e-06,
"loss": 0.5866,
"step": 2110
},
{
"epoch": 0.434159328281794,
"grad_norm": 3.65625,
"learning_rate": 9.110893782988148e-06,
"loss": 0.5906,
"step": 2120
},
{
"epoch": 0.4362072496416138,
"grad_norm": 3.28125,
"learning_rate": 9.101432331628943e-06,
"loss": 0.5084,
"step": 2130
},
{
"epoch": 0.4382551710014335,
"grad_norm": 3.875,
"learning_rate": 9.09192577162041e-06,
"loss": 0.5464,
"step": 2140
},
{
"epoch": 0.4403030923612533,
"grad_norm": 4.0,
"learning_rate": 9.082374207518234e-06,
"loss": 0.5649,
"step": 2150
},
{
"epoch": 0.4423510137210731,
"grad_norm": 3.515625,
"learning_rate": 9.072777744373073e-06,
"loss": 0.5508,
"step": 2160
},
{
"epoch": 0.4443989350808929,
"grad_norm": 3.265625,
"learning_rate": 9.063136487729397e-06,
"loss": 0.5607,
"step": 2170
},
{
"epoch": 0.4464468564407127,
"grad_norm": 3.546875,
"learning_rate": 9.05345054362432e-06,
"loss": 0.5629,
"step": 2180
},
{
"epoch": 0.4484947778005325,
"grad_norm": 4.09375,
"learning_rate": 9.043720018586447e-06,
"loss": 0.574,
"step": 2190
},
{
"epoch": 0.4505426991603522,
"grad_norm": 4.125,
"learning_rate": 9.033945019634693e-06,
"loss": 0.5668,
"step": 2200
},
{
"epoch": 0.452590620520172,
"grad_norm": 3.609375,
"learning_rate": 9.024125654277111e-06,
"loss": 0.5291,
"step": 2210
},
{
"epoch": 0.4546385418799918,
"grad_norm": 4.1875,
"learning_rate": 9.014262030509704e-06,
"loss": 0.6214,
"step": 2220
},
{
"epoch": 0.4566864632398116,
"grad_norm": 4.125,
"learning_rate": 9.004354256815247e-06,
"loss": 0.6003,
"step": 2230
},
{
"epoch": 0.4587343845996314,
"grad_norm": 3.375,
"learning_rate": 8.994402442162083e-06,
"loss": 0.5938,
"step": 2240
},
{
"epoch": 0.46078230595945113,
"grad_norm": 3.3125,
"learning_rate": 8.984406696002925e-06,
"loss": 0.5887,
"step": 2250
},
{
"epoch": 0.4628302273192709,
"grad_norm": 3.84375,
"learning_rate": 8.97436712827367e-06,
"loss": 0.6317,
"step": 2260
},
{
"epoch": 0.4648781486790907,
"grad_norm": 3.59375,
"learning_rate": 8.964283849392163e-06,
"loss": 0.5547,
"step": 2270
},
{
"epoch": 0.4669260700389105,
"grad_norm": 3.515625,
"learning_rate": 8.954156970257001e-06,
"loss": 0.6202,
"step": 2280
},
{
"epoch": 0.4689739913987303,
"grad_norm": 3.9375,
"learning_rate": 8.94398660224631e-06,
"loss": 0.6002,
"step": 2290
},
{
"epoch": 0.4710219127585501,
"grad_norm": 3.9375,
"learning_rate": 8.933772857216517e-06,
"loss": 0.5653,
"step": 2300
},
{
"epoch": 0.47306983411836984,
"grad_norm": 3.203125,
"learning_rate": 8.923515847501117e-06,
"loss": 0.624,
"step": 2310
},
{
"epoch": 0.47511775547818963,
"grad_norm": 3.5,
"learning_rate": 8.913215685909448e-06,
"loss": 0.5667,
"step": 2320
},
{
"epoch": 0.4771656768380094,
"grad_norm": 3.8125,
"learning_rate": 8.90287248572544e-06,
"loss": 0.5623,
"step": 2330
},
{
"epoch": 0.4792135981978292,
"grad_norm": 3.640625,
"learning_rate": 8.892486360706377e-06,
"loss": 0.5846,
"step": 2340
},
{
"epoch": 0.481261519557649,
"grad_norm": 4.25,
"learning_rate": 8.882057425081635e-06,
"loss": 0.5705,
"step": 2350
},
{
"epoch": 0.48330944091746875,
"grad_norm": 3.96875,
"learning_rate": 8.871585793551435e-06,
"loss": 0.5893,
"step": 2360
},
{
"epoch": 0.48535736227728854,
"grad_norm": 4.84375,
"learning_rate": 8.861071581285584e-06,
"loss": 0.6106,
"step": 2370
},
{
"epoch": 0.48740528363710833,
"grad_norm": 3.71875,
"learning_rate": 8.850514903922198e-06,
"loss": 0.5863,
"step": 2380
},
{
"epoch": 0.4894532049969281,
"grad_norm": 3.4375,
"learning_rate": 8.839915877566438e-06,
"loss": 0.5634,
"step": 2390
},
{
"epoch": 0.4915011263567479,
"grad_norm": 3.765625,
"learning_rate": 8.829274618789228e-06,
"loss": 0.5406,
"step": 2400
},
{
"epoch": 0.49354904771656766,
"grad_norm": 3.671875,
"learning_rate": 8.818591244625977e-06,
"loss": 0.5016,
"step": 2410
},
{
"epoch": 0.49559696907638745,
"grad_norm": 3.484375,
"learning_rate": 8.807865872575294e-06,
"loss": 0.6582,
"step": 2420
},
{
"epoch": 0.49764489043620724,
"grad_norm": 3.515625,
"learning_rate": 8.797098620597683e-06,
"loss": 0.573,
"step": 2430
},
{
"epoch": 0.49969281179602704,
"grad_norm": 4.0625,
"learning_rate": 8.786289607114262e-06,
"loss": 0.6161,
"step": 2440
},
{
"epoch": 0.5017407331558468,
"grad_norm": 3.953125,
"learning_rate": 8.775438951005453e-06,
"loss": 0.5801,
"step": 2450
},
{
"epoch": 0.5037886545156666,
"grad_norm": 3.046875,
"learning_rate": 8.764546771609672e-06,
"loss": 0.5537,
"step": 2460
},
{
"epoch": 0.5058365758754864,
"grad_norm": 4.03125,
"learning_rate": 8.753613188722018e-06,
"loss": 0.5411,
"step": 2470
},
{
"epoch": 0.5078844972353062,
"grad_norm": 3.828125,
"learning_rate": 8.74263832259296e-06,
"loss": 0.531,
"step": 2480
},
{
"epoch": 0.509932418595126,
"grad_norm": 3.78125,
"learning_rate": 8.73162229392701e-06,
"loss": 0.5652,
"step": 2490
},
{
"epoch": 0.5119803399549457,
"grad_norm": 4.03125,
"learning_rate": 8.7205652238814e-06,
"loss": 0.6105,
"step": 2500
},
{
"epoch": 0.5140282613147655,
"grad_norm": 4.125,
"learning_rate": 8.709467234064738e-06,
"loss": 0.5952,
"step": 2510
},
{
"epoch": 0.5160761826745853,
"grad_norm": 4.125,
"learning_rate": 8.698328446535683e-06,
"loss": 0.5135,
"step": 2520
},
{
"epoch": 0.5181241040344051,
"grad_norm": 4.28125,
"learning_rate": 8.687148983801599e-06,
"loss": 0.6145,
"step": 2530
},
{
"epoch": 0.5201720253942249,
"grad_norm": 3.9375,
"learning_rate": 8.675928968817206e-06,
"loss": 0.5855,
"step": 2540
},
{
"epoch": 0.5222199467540446,
"grad_norm": 3.875,
"learning_rate": 8.664668524983232e-06,
"loss": 0.6286,
"step": 2550
},
{
"epoch": 0.5242678681138644,
"grad_norm": 3.0,
"learning_rate": 8.653367776145045e-06,
"loss": 0.5642,
"step": 2560
},
{
"epoch": 0.5263157894736842,
"grad_norm": 3.0625,
"learning_rate": 8.642026846591303e-06,
"loss": 0.5474,
"step": 2570
},
{
"epoch": 0.528363710833504,
"grad_norm": 3.734375,
"learning_rate": 8.63064586105258e-06,
"loss": 0.6154,
"step": 2580
},
{
"epoch": 0.5304116321933238,
"grad_norm": 4.375,
"learning_rate": 8.619224944700003e-06,
"loss": 0.5802,
"step": 2590
},
{
"epoch": 0.5324595535531436,
"grad_norm": 3.546875,
"learning_rate": 8.607764223143854e-06,
"loss": 0.5435,
"step": 2600
},
{
"epoch": 0.5345074749129634,
"grad_norm": 3.015625,
"learning_rate": 8.596263822432222e-06,
"loss": 0.585,
"step": 2610
},
{
"epoch": 0.5365553962727831,
"grad_norm": 3.171875,
"learning_rate": 8.584723869049586e-06,
"loss": 0.5417,
"step": 2620
},
{
"epoch": 0.5386033176326029,
"grad_norm": 4.1875,
"learning_rate": 8.573144489915437e-06,
"loss": 0.6276,
"step": 2630
},
{
"epoch": 0.5406512389924227,
"grad_norm": 3.5625,
"learning_rate": 8.561525812382882e-06,
"loss": 0.597,
"step": 2640
},
{
"epoch": 0.5426991603522425,
"grad_norm": 3.265625,
"learning_rate": 8.549867964237246e-06,
"loss": 0.5478,
"step": 2650
},
{
"epoch": 0.5447470817120622,
"grad_norm": 3.859375,
"learning_rate": 8.538171073694656e-06,
"loss": 0.5764,
"step": 2660
},
{
"epoch": 0.546795003071882,
"grad_norm": 3.78125,
"learning_rate": 8.526435269400644e-06,
"loss": 0.5692,
"step": 2670
},
{
"epoch": 0.5488429244317018,
"grad_norm": 3.359375,
"learning_rate": 8.514660680428723e-06,
"loss": 0.5599,
"step": 2680
},
{
"epoch": 0.5508908457915216,
"grad_norm": 3.671875,
"learning_rate": 8.50284743627897e-06,
"loss": 0.6127,
"step": 2690
},
{
"epoch": 0.5529387671513414,
"grad_norm": 4.1875,
"learning_rate": 8.490995666876602e-06,
"loss": 0.654,
"step": 2700
},
{
"epoch": 0.5549866885111612,
"grad_norm": 5.1875,
"learning_rate": 8.47910550257055e-06,
"loss": 0.5806,
"step": 2710
},
{
"epoch": 0.557034609870981,
"grad_norm": 3.984375,
"learning_rate": 8.467177074132017e-06,
"loss": 0.5577,
"step": 2720
},
{
"epoch": 0.5590825312308008,
"grad_norm": 2.984375,
"learning_rate": 8.455210512753047e-06,
"loss": 0.5119,
"step": 2730
},
{
"epoch": 0.5611304525906206,
"grad_norm": 3.59375,
"learning_rate": 8.443205950045086e-06,
"loss": 0.5889,
"step": 2740
},
{
"epoch": 0.5631783739504403,
"grad_norm": 3.3125,
"learning_rate": 8.431163518037523e-06,
"loss": 0.5612,
"step": 2750
},
{
"epoch": 0.5652262953102601,
"grad_norm": 3.21875,
"learning_rate": 8.41908334917624e-06,
"loss": 0.5988,
"step": 2760
},
{
"epoch": 0.5672742166700798,
"grad_norm": 3.9375,
"learning_rate": 8.40696557632217e-06,
"loss": 0.5484,
"step": 2770
},
{
"epoch": 0.5693221380298996,
"grad_norm": 4.34375,
"learning_rate": 8.394810332749814e-06,
"loss": 0.5484,
"step": 2780
},
{
"epoch": 0.5713700593897194,
"grad_norm": 4.9375,
"learning_rate": 8.382617752145794e-06,
"loss": 0.5683,
"step": 2790
},
{
"epoch": 0.5734179807495392,
"grad_norm": 5.03125,
"learning_rate": 8.370387968607369e-06,
"loss": 0.5616,
"step": 2800
},
{
"epoch": 0.575465902109359,
"grad_norm": 3.515625,
"learning_rate": 8.358121116640971e-06,
"loss": 0.5814,
"step": 2810
},
{
"epoch": 0.5775138234691788,
"grad_norm": 3.65625,
"learning_rate": 8.345817331160714e-06,
"loss": 0.5634,
"step": 2820
},
{
"epoch": 0.5795617448289986,
"grad_norm": 3.6875,
"learning_rate": 8.333476747486922e-06,
"loss": 0.6005,
"step": 2830
},
{
"epoch": 0.5816096661888184,
"grad_norm": 3.390625,
"learning_rate": 8.321099501344634e-06,
"loss": 0.5763,
"step": 2840
},
{
"epoch": 0.5836575875486382,
"grad_norm": 3.46875,
"learning_rate": 8.308685728862111e-06,
"loss": 0.5533,
"step": 2850
},
{
"epoch": 0.585705508908458,
"grad_norm": 4.09375,
"learning_rate": 8.296235566569343e-06,
"loss": 0.5745,
"step": 2860
},
{
"epoch": 0.5877534302682776,
"grad_norm": 3.640625,
"learning_rate": 8.283749151396542e-06,
"loss": 0.5044,
"step": 2870
},
{
"epoch": 0.5898013516280974,
"grad_norm": 3.453125,
"learning_rate": 8.27122662067264e-06,
"loss": 0.5412,
"step": 2880
},
{
"epoch": 0.5918492729879172,
"grad_norm": 3.765625,
"learning_rate": 8.258668112123781e-06,
"loss": 0.5889,
"step": 2890
},
{
"epoch": 0.593897194347737,
"grad_norm": 4.375,
"learning_rate": 8.246073763871796e-06,
"loss": 0.5874,
"step": 2900
},
{
"epoch": 0.5959451157075568,
"grad_norm": 3.265625,
"learning_rate": 8.233443714432698e-06,
"loss": 0.5976,
"step": 2910
},
{
"epoch": 0.5979930370673766,
"grad_norm": 4.21875,
"learning_rate": 8.220778102715145e-06,
"loss": 0.6134,
"step": 2920
},
{
"epoch": 0.6000409584271964,
"grad_norm": 4.03125,
"learning_rate": 8.20807706801892e-06,
"loss": 0.5816,
"step": 2930
},
{
"epoch": 0.6020888797870162,
"grad_norm": 3.578125,
"learning_rate": 8.195340750033401e-06,
"loss": 0.5833,
"step": 2940
},
{
"epoch": 0.604136801146836,
"grad_norm": 3.9375,
"learning_rate": 8.182569288836015e-06,
"loss": 0.5768,
"step": 2950
},
{
"epoch": 0.6061847225066558,
"grad_norm": 3.65625,
"learning_rate": 8.16976282489071e-06,
"loss": 0.578,
"step": 2960
},
{
"epoch": 0.6082326438664756,
"grad_norm": 3.21875,
"learning_rate": 8.1569214990464e-06,
"loss": 0.461,
"step": 2970
},
{
"epoch": 0.6102805652262953,
"grad_norm": 3.09375,
"learning_rate": 8.144045452535418e-06,
"loss": 0.5564,
"step": 2980
},
{
"epoch": 0.612328486586115,
"grad_norm": 3.828125,
"learning_rate": 8.131134826971967e-06,
"loss": 0.5592,
"step": 2990
},
{
"epoch": 0.6143764079459348,
"grad_norm": 4.4375,
"learning_rate": 8.118189764350557e-06,
"loss": 0.6086,
"step": 3000
},
{
"epoch": 0.6143764079459348,
"eval_loss": 0.564007043838501,
"eval_runtime": 49.3837,
"eval_samples_per_second": 83.266,
"eval_steps_per_second": 41.633,
"step": 3000
},
{
"epoch": 0.6164243293057546,
"grad_norm": 3.890625,
"learning_rate": 8.105210407044452e-06,
"loss": 0.6133,
"step": 3010
},
{
"epoch": 0.6184722506655744,
"grad_norm": 3.734375,
"learning_rate": 8.09219689780409e-06,
"loss": 0.5601,
"step": 3020
},
{
"epoch": 0.6205201720253942,
"grad_norm": 3.828125,
"learning_rate": 8.079149379755525e-06,
"loss": 0.569,
"step": 3030
},
{
"epoch": 0.622568093385214,
"grad_norm": 4.375,
"learning_rate": 8.066067996398853e-06,
"loss": 0.5513,
"step": 3040
},
{
"epoch": 0.6246160147450338,
"grad_norm": 4.4375,
"learning_rate": 8.052952891606617e-06,
"loss": 0.5664,
"step": 3050
},
{
"epoch": 0.6266639361048536,
"grad_norm": 4.09375,
"learning_rate": 8.039804209622255e-06,
"loss": 0.5716,
"step": 3060
},
{
"epoch": 0.6287118574646734,
"grad_norm": 3.6875,
"learning_rate": 8.026622095058483e-06,
"loss": 0.5099,
"step": 3070
},
{
"epoch": 0.6307597788244932,
"grad_norm": 4.125,
"learning_rate": 8.01340669289572e-06,
"loss": 0.5344,
"step": 3080
},
{
"epoch": 0.6328077001843129,
"grad_norm": 3.78125,
"learning_rate": 8.000158148480497e-06,
"loss": 0.5317,
"step": 3090
},
{
"epoch": 0.6348556215441327,
"grad_norm": 3.25,
"learning_rate": 7.986876607523842e-06,
"loss": 0.5768,
"step": 3100
},
{
"epoch": 0.6369035429039525,
"grad_norm": 6.375,
"learning_rate": 7.973562216099694e-06,
"loss": 0.5755,
"step": 3110
},
{
"epoch": 0.6389514642637723,
"grad_norm": 3.78125,
"learning_rate": 7.960215120643294e-06,
"loss": 0.5833,
"step": 3120
},
{
"epoch": 0.640999385623592,
"grad_norm": 4.1875,
"learning_rate": 7.946835467949561e-06,
"loss": 0.5504,
"step": 3130
},
{
"epoch": 0.6430473069834118,
"grad_norm": 4.0,
"learning_rate": 7.933423405171496e-06,
"loss": 0.5595,
"step": 3140
},
{
"epoch": 0.6450952283432316,
"grad_norm": 3.984375,
"learning_rate": 7.91997907981855e-06,
"loss": 0.5771,
"step": 3150
},
{
"epoch": 0.6471431497030514,
"grad_norm": 3.953125,
"learning_rate": 7.906502639755008e-06,
"loss": 0.5923,
"step": 3160
},
{
"epoch": 0.6491910710628712,
"grad_norm": 3.0,
"learning_rate": 7.892994233198361e-06,
"loss": 0.5906,
"step": 3170
},
{
"epoch": 0.651238992422691,
"grad_norm": 3.671875,
"learning_rate": 7.879454008717675e-06,
"loss": 0.5744,
"step": 3180
},
{
"epoch": 0.6532869137825108,
"grad_norm": 3.78125,
"learning_rate": 7.865882115231959e-06,
"loss": 0.5737,
"step": 3190
},
{
"epoch": 0.6553348351423305,
"grad_norm": 4.1875,
"learning_rate": 7.852278702008527e-06,
"loss": 0.5927,
"step": 3200
},
{
"epoch": 0.6573827565021503,
"grad_norm": 3.625,
"learning_rate": 7.838643918661354e-06,
"loss": 0.5658,
"step": 3210
},
{
"epoch": 0.6594306778619701,
"grad_norm": 3.046875,
"learning_rate": 7.824977915149432e-06,
"loss": 0.5321,
"step": 3220
},
{
"epoch": 0.6614785992217899,
"grad_norm": 3.390625,
"learning_rate": 7.811280841775124e-06,
"loss": 0.5156,
"step": 3230
},
{
"epoch": 0.6635265205816097,
"grad_norm": 3.578125,
"learning_rate": 7.797552849182502e-06,
"loss": 0.5588,
"step": 3240
},
{
"epoch": 0.6655744419414295,
"grad_norm": 3.90625,
"learning_rate": 7.783794088355698e-06,
"loss": 0.6302,
"step": 3250
},
{
"epoch": 0.6676223633012492,
"grad_norm": 3.1875,
"learning_rate": 7.770004710617247e-06,
"loss": 0.5842,
"step": 3260
},
{
"epoch": 0.669670284661069,
"grad_norm": 3.015625,
"learning_rate": 7.756184867626407e-06,
"loss": 0.5592,
"step": 3270
},
{
"epoch": 0.6717182060208888,
"grad_norm": 3.625,
"learning_rate": 7.742334711377509e-06,
"loss": 0.5674,
"step": 3280
},
{
"epoch": 0.6737661273807086,
"grad_norm": 3.90625,
"learning_rate": 7.728454394198271e-06,
"loss": 0.6026,
"step": 3290
},
{
"epoch": 0.6758140487405284,
"grad_norm": 3.265625,
"learning_rate": 7.714544068748137e-06,
"loss": 0.5144,
"step": 3300
},
{
"epoch": 0.6778619701003481,
"grad_norm": 3.625,
"learning_rate": 7.700603888016583e-06,
"loss": 0.5396,
"step": 3310
},
{
"epoch": 0.6799098914601679,
"grad_norm": 3.796875,
"learning_rate": 7.686634005321442e-06,
"loss": 0.5889,
"step": 3320
},
{
"epoch": 0.6819578128199877,
"grad_norm": 2.921875,
"learning_rate": 7.672634574307223e-06,
"loss": 0.5414,
"step": 3330
},
{
"epoch": 0.6840057341798075,
"grad_norm": 2.78125,
"learning_rate": 7.658605748943407e-06,
"loss": 0.4579,
"step": 3340
},
{
"epoch": 0.6860536555396273,
"grad_norm": 4.5625,
"learning_rate": 7.644547683522767e-06,
"loss": 0.5635,
"step": 3350
},
{
"epoch": 0.6881015768994471,
"grad_norm": 3.59375,
"learning_rate": 7.630460532659667e-06,
"loss": 0.555,
"step": 3360
},
{
"epoch": 0.6901494982592669,
"grad_norm": 4.375,
"learning_rate": 7.616344451288357e-06,
"loss": 0.5808,
"step": 3370
},
{
"epoch": 0.6921974196190867,
"grad_norm": 3.28125,
"learning_rate": 7.602199594661273e-06,
"loss": 0.5623,
"step": 3380
},
{
"epoch": 0.6942453409789064,
"grad_norm": 3.46875,
"learning_rate": 7.588026118347334e-06,
"loss": 0.5326,
"step": 3390
},
{
"epoch": 0.6962932623387262,
"grad_norm": 2.84375,
"learning_rate": 7.5738241782302156e-06,
"loss": 0.5501,
"step": 3400
},
{
"epoch": 0.698341183698546,
"grad_norm": 3.96875,
"learning_rate": 7.559593930506658e-06,
"loss": 0.5593,
"step": 3410
},
{
"epoch": 0.7003891050583657,
"grad_norm": 2.90625,
"learning_rate": 7.545335531684725e-06,
"loss": 0.5492,
"step": 3420
},
{
"epoch": 0.7024370264181855,
"grad_norm": 4.96875,
"learning_rate": 7.531049138582105e-06,
"loss": 0.5853,
"step": 3430
},
{
"epoch": 0.7044849477780053,
"grad_norm": 3.59375,
"learning_rate": 7.516734908324363e-06,
"loss": 0.5875,
"step": 3440
},
{
"epoch": 0.7065328691378251,
"grad_norm": 3.453125,
"learning_rate": 7.5023929983432344e-06,
"loss": 0.5459,
"step": 3450
},
{
"epoch": 0.7085807904976449,
"grad_norm": 3.171875,
"learning_rate": 7.488023566374878e-06,
"loss": 0.4829,
"step": 3460
},
{
"epoch": 0.7106287118574647,
"grad_norm": 4.71875,
"learning_rate": 7.4736267704581485e-06,
"loss": 0.6025,
"step": 3470
},
{
"epoch": 0.7126766332172845,
"grad_norm": 2.796875,
"learning_rate": 7.459202768932856e-06,
"loss": 0.5715,
"step": 3480
},
{
"epoch": 0.7147245545771043,
"grad_norm": 3.21875,
"learning_rate": 7.444751720438026e-06,
"loss": 0.5352,
"step": 3490
},
{
"epoch": 0.7167724759369241,
"grad_norm": 3.953125,
"learning_rate": 7.430273783910153e-06,
"loss": 0.554,
"step": 3500
},
{
"epoch": 0.7188203972967439,
"grad_norm": 3.375,
"learning_rate": 7.415769118581456e-06,
"loss": 0.4954,
"step": 3510
},
{
"epoch": 0.7208683186565635,
"grad_norm": 3.859375,
"learning_rate": 7.40123788397812e-06,
"loss": 0.5202,
"step": 3520
},
{
"epoch": 0.7229162400163833,
"grad_norm": 3.828125,
"learning_rate": 7.386680239918548e-06,
"loss": 0.5294,
"step": 3530
},
{
"epoch": 0.7249641613762031,
"grad_norm": 4.09375,
"learning_rate": 7.372096346511602e-06,
"loss": 0.5507,
"step": 3540
},
{
"epoch": 0.7270120827360229,
"grad_norm": 3.0625,
"learning_rate": 7.357486364154842e-06,
"loss": 0.5977,
"step": 3550
},
{
"epoch": 0.7290600040958427,
"grad_norm": 2.34375,
"learning_rate": 7.3428504535327585e-06,
"loss": 0.5502,
"step": 3560
},
{
"epoch": 0.7311079254556625,
"grad_norm": 3.453125,
"learning_rate": 7.328188775615009e-06,
"loss": 0.5709,
"step": 3570
},
{
"epoch": 0.7331558468154823,
"grad_norm": 3.265625,
"learning_rate": 7.313501491654649e-06,
"loss": 0.5813,
"step": 3580
},
{
"epoch": 0.7352037681753021,
"grad_norm": 3.375,
"learning_rate": 7.298788763186353e-06,
"loss": 0.5048,
"step": 3590
},
{
"epoch": 0.7372516895351219,
"grad_norm": 3.3125,
"learning_rate": 7.284050752024643e-06,
"loss": 0.5842,
"step": 3600
},
{
"epoch": 0.7392996108949417,
"grad_norm": 3.515625,
"learning_rate": 7.269287620262105e-06,
"loss": 0.5087,
"step": 3610
},
{
"epoch": 0.7413475322547615,
"grad_norm": 3.578125,
"learning_rate": 7.25449953026761e-06,
"loss": 0.5671,
"step": 3620
},
{
"epoch": 0.7433954536145811,
"grad_norm": 3.90625,
"learning_rate": 7.239686644684525e-06,
"loss": 0.5711,
"step": 3630
},
{
"epoch": 0.7454433749744009,
"grad_norm": 3.703125,
"learning_rate": 7.224849126428925e-06,
"loss": 0.5572,
"step": 3640
},
{
"epoch": 0.7474912963342207,
"grad_norm": 4.5,
"learning_rate": 7.209987138687806e-06,
"loss": 0.543,
"step": 3650
},
{
"epoch": 0.7495392176940405,
"grad_norm": 3.5,
"learning_rate": 7.195100844917281e-06,
"loss": 0.5518,
"step": 3660
},
{
"epoch": 0.7515871390538603,
"grad_norm": 2.875,
"learning_rate": 7.180190408840788e-06,
"loss": 0.5479,
"step": 3670
},
{
"epoch": 0.7536350604136801,
"grad_norm": 3.953125,
"learning_rate": 7.165255994447288e-06,
"loss": 0.5755,
"step": 3680
},
{
"epoch": 0.7556829817734999,
"grad_norm": 4.0,
"learning_rate": 7.150297765989467e-06,
"loss": 0.6021,
"step": 3690
},
{
"epoch": 0.7577309031333197,
"grad_norm": 3.734375,
"learning_rate": 7.135315887981918e-06,
"loss": 0.5618,
"step": 3700
},
{
"epoch": 0.7597788244931395,
"grad_norm": 3.375,
"learning_rate": 7.120310525199341e-06,
"loss": 0.5302,
"step": 3710
},
{
"epoch": 0.7618267458529593,
"grad_norm": 3.015625,
"learning_rate": 7.105281842674729e-06,
"loss": 0.4933,
"step": 3720
},
{
"epoch": 0.7638746672127791,
"grad_norm": 3.140625,
"learning_rate": 7.090230005697552e-06,
"loss": 0.588,
"step": 3730
},
{
"epoch": 0.7659225885725988,
"grad_norm": 3.546875,
"learning_rate": 7.075155179811931e-06,
"loss": 0.5628,
"step": 3740
},
{
"epoch": 0.7679705099324186,
"grad_norm": 3.265625,
"learning_rate": 7.0600575308148375e-06,
"loss": 0.5523,
"step": 3750
},
{
"epoch": 0.7700184312922383,
"grad_norm": 3.359375,
"learning_rate": 7.044937224754249e-06,
"loss": 0.5648,
"step": 3760
},
{
"epoch": 0.7720663526520581,
"grad_norm": 3.46875,
"learning_rate": 7.029794427927337e-06,
"loss": 0.5495,
"step": 3770
},
{
"epoch": 0.7741142740118779,
"grad_norm": 3.65625,
"learning_rate": 7.0146293068786255e-06,
"loss": 0.6386,
"step": 3780
},
{
"epoch": 0.7761621953716977,
"grad_norm": 3.828125,
"learning_rate": 6.999442028398171e-06,
"loss": 0.5634,
"step": 3790
},
{
"epoch": 0.7782101167315175,
"grad_norm": 4.34375,
"learning_rate": 6.984232759519725e-06,
"loss": 0.4956,
"step": 3800
},
{
"epoch": 0.7802580380913373,
"grad_norm": 3.84375,
"learning_rate": 6.969001667518891e-06,
"loss": 0.5745,
"step": 3810
},
{
"epoch": 0.7823059594511571,
"grad_norm": 3.171875,
"learning_rate": 6.9537489199112915e-06,
"loss": 0.5519,
"step": 3820
},
{
"epoch": 0.7843538808109769,
"grad_norm": 3.1875,
"learning_rate": 6.938474684450725e-06,
"loss": 0.5338,
"step": 3830
},
{
"epoch": 0.7864018021707967,
"grad_norm": 3.859375,
"learning_rate": 6.923179129127312e-06,
"loss": 0.641,
"step": 3840
},
{
"epoch": 0.7884497235306164,
"grad_norm": 3.25,
"learning_rate": 6.907862422165661e-06,
"loss": 0.4959,
"step": 3850
},
{
"epoch": 0.7904976448904362,
"grad_norm": 4.09375,
"learning_rate": 6.892524732023013e-06,
"loss": 0.5589,
"step": 3860
},
{
"epoch": 0.792545566250256,
"grad_norm": 4.28125,
"learning_rate": 6.877166227387382e-06,
"loss": 0.6053,
"step": 3870
},
{
"epoch": 0.7945934876100758,
"grad_norm": 3.125,
"learning_rate": 6.86178707717571e-06,
"loss": 0.5434,
"step": 3880
},
{
"epoch": 0.7966414089698955,
"grad_norm": 4.0625,
"learning_rate": 6.8463874505320004e-06,
"loss": 0.5558,
"step": 3890
},
{
"epoch": 0.7986893303297153,
"grad_norm": 4.15625,
"learning_rate": 6.8309675168254664e-06,
"loss": 0.5358,
"step": 3900
},
{
"epoch": 0.8007372516895351,
"grad_norm": 3.921875,
"learning_rate": 6.81552744564866e-06,
"loss": 0.5228,
"step": 3910
},
{
"epoch": 0.8027851730493549,
"grad_norm": 3.328125,
"learning_rate": 6.800067406815612e-06,
"loss": 0.5956,
"step": 3920
},
{
"epoch": 0.8048330944091747,
"grad_norm": 3.140625,
"learning_rate": 6.784587570359965e-06,
"loss": 0.5785,
"step": 3930
},
{
"epoch": 0.8068810157689945,
"grad_norm": 4.21875,
"learning_rate": 6.769088106533097e-06,
"loss": 0.5481,
"step": 3940
},
{
"epoch": 0.8089289371288143,
"grad_norm": 4.21875,
"learning_rate": 6.753569185802256e-06,
"loss": 0.5554,
"step": 3950
},
{
"epoch": 0.810976858488634,
"grad_norm": 4.125,
"learning_rate": 6.73803097884868e-06,
"loss": 0.5738,
"step": 3960
},
{
"epoch": 0.8130247798484538,
"grad_norm": 2.984375,
"learning_rate": 6.722473656565724e-06,
"loss": 0.5275,
"step": 3970
},
{
"epoch": 0.8150727012082736,
"grad_norm": 4.0625,
"learning_rate": 6.706897390056979e-06,
"loss": 0.597,
"step": 3980
},
{
"epoch": 0.8171206225680934,
"grad_norm": 4.40625,
"learning_rate": 6.691302350634386e-06,
"loss": 0.5808,
"step": 3990
},
{
"epoch": 0.8191685439279132,
"grad_norm": 2.9375,
"learning_rate": 6.675688709816362e-06,
"loss": 0.5217,
"step": 4000
},
{
"epoch": 0.8191685439279132,
"eval_loss": 0.5564442873001099,
"eval_runtime": 48.7272,
"eval_samples_per_second": 84.388,
"eval_steps_per_second": 42.194,
"step": 4000
},
{
"epoch": 0.821216465287733,
"grad_norm": 3.203125,
"learning_rate": 6.660056639325898e-06,
"loss": 0.5558,
"step": 4010
},
{
"epoch": 0.8232643866475527,
"grad_norm": 3.5,
"learning_rate": 6.644406311088687e-06,
"loss": 0.5654,
"step": 4020
},
{
"epoch": 0.8253123080073725,
"grad_norm": 3.640625,
"learning_rate": 6.628737897231226e-06,
"loss": 0.5095,
"step": 4030
},
{
"epoch": 0.8273602293671923,
"grad_norm": 2.9375,
"learning_rate": 6.613051570078914e-06,
"loss": 0.5459,
"step": 4040
},
{
"epoch": 0.8294081507270121,
"grad_norm": 4.96875,
"learning_rate": 6.597347502154177e-06,
"loss": 0.5534,
"step": 4050
},
{
"epoch": 0.8314560720868319,
"grad_norm": 3.875,
"learning_rate": 6.581625866174548e-06,
"loss": 0.598,
"step": 4060
},
{
"epoch": 0.8335039934466516,
"grad_norm": 3.84375,
"learning_rate": 6.565886835050785e-06,
"loss": 0.5429,
"step": 4070
},
{
"epoch": 0.8355519148064714,
"grad_norm": 3.4375,
"learning_rate": 6.550130581884958e-06,
"loss": 0.5431,
"step": 4080
},
{
"epoch": 0.8375998361662912,
"grad_norm": 3.65625,
"learning_rate": 6.534357279968554e-06,
"loss": 0.5966,
"step": 4090
},
{
"epoch": 0.839647757526111,
"grad_norm": 3.71875,
"learning_rate": 6.5185671027805655e-06,
"loss": 0.5669,
"step": 4100
},
{
"epoch": 0.8416956788859308,
"grad_norm": 3.28125,
"learning_rate": 6.5027602239855805e-06,
"loss": 0.5071,
"step": 4110
},
{
"epoch": 0.8437436002457506,
"grad_norm": 3.75,
"learning_rate": 6.486936817431878e-06,
"loss": 0.5571,
"step": 4120
},
{
"epoch": 0.8457915216055704,
"grad_norm": 4.03125,
"learning_rate": 6.471097057149516e-06,
"loss": 0.6112,
"step": 4130
},
{
"epoch": 0.8478394429653902,
"grad_norm": 3.21875,
"learning_rate": 6.45524111734841e-06,
"loss": 0.5302,
"step": 4140
},
{
"epoch": 0.84988736432521,
"grad_norm": 4.4375,
"learning_rate": 6.439369172416428e-06,
"loss": 0.5725,
"step": 4150
},
{
"epoch": 0.8519352856850297,
"grad_norm": 3.5625,
"learning_rate": 6.4234813969174615e-06,
"loss": 0.5502,
"step": 4160
},
{
"epoch": 0.8539832070448494,
"grad_norm": 3.453125,
"learning_rate": 6.407577965589515e-06,
"loss": 0.5261,
"step": 4170
},
{
"epoch": 0.8560311284046692,
"grad_norm": 3.46875,
"learning_rate": 6.391659053342778e-06,
"loss": 0.5446,
"step": 4180
},
{
"epoch": 0.858079049764489,
"grad_norm": 3.65625,
"learning_rate": 6.3757248352577015e-06,
"loss": 0.6036,
"step": 4190
},
{
"epoch": 0.8601269711243088,
"grad_norm": 3.203125,
"learning_rate": 6.359775486583077e-06,
"loss": 0.5096,
"step": 4200
},
{
"epoch": 0.8621748924841286,
"grad_norm": 4.0,
"learning_rate": 6.343811182734108e-06,
"loss": 0.5413,
"step": 4210
},
{
"epoch": 0.8642228138439484,
"grad_norm": 3.953125,
"learning_rate": 6.327832099290473e-06,
"loss": 0.5094,
"step": 4220
},
{
"epoch": 0.8662707352037682,
"grad_norm": 3.90625,
"learning_rate": 6.311838411994404e-06,
"loss": 0.5392,
"step": 4230
},
{
"epoch": 0.868318656563588,
"grad_norm": 3.484375,
"learning_rate": 6.295830296748753e-06,
"loss": 0.6021,
"step": 4240
},
{
"epoch": 0.8703665779234078,
"grad_norm": 3.171875,
"learning_rate": 6.279807929615047e-06,
"loss": 0.5416,
"step": 4250
},
{
"epoch": 0.8724144992832276,
"grad_norm": 3.796875,
"learning_rate": 6.263771486811567e-06,
"loss": 0.5066,
"step": 4260
},
{
"epoch": 0.8744624206430474,
"grad_norm": 3.890625,
"learning_rate": 6.2477211447113975e-06,
"loss": 0.5029,
"step": 4270
},
{
"epoch": 0.876510342002867,
"grad_norm": 4.03125,
"learning_rate": 6.231657079840491e-06,
"loss": 0.5448,
"step": 4280
},
{
"epoch": 0.8785582633626868,
"grad_norm": 3.4375,
"learning_rate": 6.215579468875729e-06,
"loss": 0.5299,
"step": 4290
},
{
"epoch": 0.8806061847225066,
"grad_norm": 3.53125,
"learning_rate": 6.199488488642975e-06,
"loss": 0.5806,
"step": 4300
},
{
"epoch": 0.8826541060823264,
"grad_norm": 3.375,
"learning_rate": 6.18338431611513e-06,
"loss": 0.5305,
"step": 4310
},
{
"epoch": 0.8847020274421462,
"grad_norm": 4.15625,
"learning_rate": 6.167267128410191e-06,
"loss": 0.5216,
"step": 4320
},
{
"epoch": 0.886749948801966,
"grad_norm": 3.375,
"learning_rate": 6.151137102789297e-06,
"loss": 0.5515,
"step": 4330
},
{
"epoch": 0.8887978701617858,
"grad_norm": 3.0625,
"learning_rate": 6.1349944166547825e-06,
"loss": 0.5276,
"step": 4340
},
{
"epoch": 0.8908457915216056,
"grad_norm": 3.171875,
"learning_rate": 6.118839247548226e-06,
"loss": 0.5427,
"step": 4350
},
{
"epoch": 0.8928937128814254,
"grad_norm": 3.171875,
"learning_rate": 6.102671773148494e-06,
"loss": 0.5531,
"step": 4360
},
{
"epoch": 0.8949416342412452,
"grad_norm": 3.4375,
"learning_rate": 6.086492171269794e-06,
"loss": 0.509,
"step": 4370
},
{
"epoch": 0.896989555601065,
"grad_norm": 4.5625,
"learning_rate": 6.070300619859715e-06,
"loss": 0.5731,
"step": 4380
},
{
"epoch": 0.8990374769608847,
"grad_norm": 3.1875,
"learning_rate": 6.054097296997266e-06,
"loss": 0.5113,
"step": 4390
},
{
"epoch": 0.9010853983207044,
"grad_norm": 5.03125,
"learning_rate": 6.037882380890924e-06,
"loss": 0.5583,
"step": 4400
},
{
"epoch": 0.9031333196805242,
"grad_norm": 3.421875,
"learning_rate": 6.021656049876672e-06,
"loss": 0.5341,
"step": 4410
},
{
"epoch": 0.905181241040344,
"grad_norm": 4.0,
"learning_rate": 6.005418482416036e-06,
"loss": 0.5671,
"step": 4420
},
{
"epoch": 0.9072291624001638,
"grad_norm": 3.96875,
"learning_rate": 5.989169857094126e-06,
"loss": 0.5846,
"step": 4430
},
{
"epoch": 0.9092770837599836,
"grad_norm": 3.203125,
"learning_rate": 5.972910352617667e-06,
"loss": 0.552,
"step": 4440
},
{
"epoch": 0.9113250051198034,
"grad_norm": 3.78125,
"learning_rate": 5.956640147813035e-06,
"loss": 0.5681,
"step": 4450
},
{
"epoch": 0.9133729264796232,
"grad_norm": 4.0,
"learning_rate": 5.940359421624292e-06,
"loss": 0.5831,
"step": 4460
},
{
"epoch": 0.915420847839443,
"grad_norm": 3.21875,
"learning_rate": 5.92406835311122e-06,
"loss": 0.5516,
"step": 4470
},
{
"epoch": 0.9174687691992628,
"grad_norm": 3.703125,
"learning_rate": 5.907767121447343e-06,
"loss": 0.5779,
"step": 4480
},
{
"epoch": 0.9195166905590826,
"grad_norm": 4.09375,
"learning_rate": 5.891455905917968e-06,
"loss": 0.6099,
"step": 4490
},
{
"epoch": 0.9215646119189023,
"grad_norm": 3.34375,
"learning_rate": 5.8751348859182025e-06,
"loss": 0.5506,
"step": 4500
},
{
"epoch": 0.9236125332787221,
"grad_norm": 4.03125,
"learning_rate": 5.858804240950988e-06,
"loss": 0.533,
"step": 4510
},
{
"epoch": 0.9256604546385419,
"grad_norm": 4.03125,
"learning_rate": 5.842464150625125e-06,
"loss": 0.5653,
"step": 4520
},
{
"epoch": 0.9277083759983616,
"grad_norm": 3.484375,
"learning_rate": 5.826114794653294e-06,
"loss": 0.5573,
"step": 4530
},
{
"epoch": 0.9297562973581814,
"grad_norm": 3.6875,
"learning_rate": 5.809756352850083e-06,
"loss": 0.5227,
"step": 4540
},
{
"epoch": 0.9318042187180012,
"grad_norm": 3.734375,
"learning_rate": 5.793389005130011e-06,
"loss": 0.538,
"step": 4550
},
{
"epoch": 0.933852140077821,
"grad_norm": 3.765625,
"learning_rate": 5.7770129315055435e-06,
"loss": 0.514,
"step": 4560
},
{
"epoch": 0.9359000614376408,
"grad_norm": 3.75,
"learning_rate": 5.760628312085114e-06,
"loss": 0.5736,
"step": 4570
},
{
"epoch": 0.9379479827974606,
"grad_norm": 4.28125,
"learning_rate": 5.744235327071151e-06,
"loss": 0.5473,
"step": 4580
},
{
"epoch": 0.9399959041572804,
"grad_norm": 4.1875,
"learning_rate": 5.727834156758082e-06,
"loss": 0.5422,
"step": 4590
},
{
"epoch": 0.9420438255171002,
"grad_norm": 3.203125,
"learning_rate": 5.711424981530367e-06,
"loss": 0.5134,
"step": 4600
},
{
"epoch": 0.9440917468769199,
"grad_norm": 3.453125,
"learning_rate": 5.6950079818605e-06,
"loss": 0.5406,
"step": 4610
},
{
"epoch": 0.9461396682367397,
"grad_norm": 4.5,
"learning_rate": 5.678583338307033e-06,
"loss": 0.6124,
"step": 4620
},
{
"epoch": 0.9481875895965595,
"grad_norm": 3.59375,
"learning_rate": 5.662151231512588e-06,
"loss": 0.5095,
"step": 4630
},
{
"epoch": 0.9502355109563793,
"grad_norm": 5.15625,
"learning_rate": 5.645711842201869e-06,
"loss": 0.5337,
"step": 4640
},
{
"epoch": 0.952283432316199,
"grad_norm": 3.4375,
"learning_rate": 5.629265351179673e-06,
"loss": 0.5252,
"step": 4650
},
{
"epoch": 0.9543313536760188,
"grad_norm": 3.390625,
"learning_rate": 5.612811939328907e-06,
"loss": 0.5631,
"step": 4660
},
{
"epoch": 0.9563792750358386,
"grad_norm": 3.78125,
"learning_rate": 5.596351787608592e-06,
"loss": 0.5531,
"step": 4670
},
{
"epoch": 0.9584271963956584,
"grad_norm": 3.671875,
"learning_rate": 5.579885077051879e-06,
"loss": 0.5673,
"step": 4680
},
{
"epoch": 0.9604751177554782,
"grad_norm": 4.0625,
"learning_rate": 5.5634119887640515e-06,
"loss": 0.5721,
"step": 4690
},
{
"epoch": 0.962523039115298,
"grad_norm": 3.875,
"learning_rate": 5.546932703920539e-06,
"loss": 0.577,
"step": 4700
},
{
"epoch": 0.9645709604751177,
"grad_norm": 3.5625,
"learning_rate": 5.530447403764924e-06,
"loss": 0.4924,
"step": 4710
},
{
"epoch": 0.9666188818349375,
"grad_norm": 3.875,
"learning_rate": 5.513956269606944e-06,
"loss": 0.5552,
"step": 4720
},
{
"epoch": 0.9686668031947573,
"grad_norm": 3.8125,
"learning_rate": 5.497459482820499e-06,
"loss": 0.6076,
"step": 4730
},
{
"epoch": 0.9707147245545771,
"grad_norm": 3.65625,
"learning_rate": 5.480957224841666e-06,
"loss": 0.5824,
"step": 4740
},
{
"epoch": 0.9727626459143969,
"grad_norm": 4.59375,
"learning_rate": 5.4644496771666845e-06,
"loss": 0.5634,
"step": 4750
},
{
"epoch": 0.9748105672742167,
"grad_norm": 4.3125,
"learning_rate": 5.447937021349979e-06,
"loss": 0.5331,
"step": 4760
},
{
"epoch": 0.9768584886340365,
"grad_norm": 3.4375,
"learning_rate": 5.431419439002155e-06,
"loss": 0.5859,
"step": 4770
},
{
"epoch": 0.9789064099938563,
"grad_norm": 2.765625,
"learning_rate": 5.414897111787998e-06,
"loss": 0.5416,
"step": 4780
},
{
"epoch": 0.980954331353676,
"grad_norm": 3.3125,
"learning_rate": 5.3983702214244805e-06,
"loss": 0.5383,
"step": 4790
},
{
"epoch": 0.9830022527134958,
"grad_norm": 2.859375,
"learning_rate": 5.381838949678759e-06,
"loss": 0.5247,
"step": 4800
},
{
"epoch": 0.9850501740733156,
"grad_norm": 4.21875,
"learning_rate": 5.365303478366184e-06,
"loss": 0.5983,
"step": 4810
},
{
"epoch": 0.9870980954331353,
"grad_norm": 3.96875,
"learning_rate": 5.348763989348285e-06,
"loss": 0.5884,
"step": 4820
},
{
"epoch": 0.9891460167929551,
"grad_norm": 3.5,
"learning_rate": 5.332220664530788e-06,
"loss": 0.5408,
"step": 4830
},
{
"epoch": 0.9911939381527749,
"grad_norm": 3.421875,
"learning_rate": 5.315673685861602e-06,
"loss": 0.4796,
"step": 4840
},
{
"epoch": 0.9932418595125947,
"grad_norm": 4.0625,
"learning_rate": 5.29912323532882e-06,
"loss": 0.5175,
"step": 4850
},
{
"epoch": 0.9952897808724145,
"grad_norm": 3.515625,
"learning_rate": 5.282569494958723e-06,
"loss": 0.5127,
"step": 4860
},
{
"epoch": 0.9973377022322343,
"grad_norm": 3.796875,
"learning_rate": 5.266012646813774e-06,
"loss": 0.5912,
"step": 4870
},
{
"epoch": 0.9993856235920541,
"grad_norm": 4.21875,
"learning_rate": 5.249452872990616e-06,
"loss": 0.5986,
"step": 4880
},
{
"epoch": 1.0014335449518739,
"grad_norm": 4.21875,
"learning_rate": 5.2328903556180666e-06,
"loss": 0.5311,
"step": 4890
},
{
"epoch": 1.0034814663116935,
"grad_norm": 3.15625,
"learning_rate": 5.216325276855122e-06,
"loss": 0.5112,
"step": 4900
},
{
"epoch": 1.0055293876715135,
"grad_norm": 3.53125,
"learning_rate": 5.1997578188889465e-06,
"loss": 0.4965,
"step": 4910
},
{
"epoch": 1.0075773090313331,
"grad_norm": 3.46875,
"learning_rate": 5.183188163932873e-06,
"loss": 0.4898,
"step": 4920
},
{
"epoch": 1.009625230391153,
"grad_norm": 3.359375,
"learning_rate": 5.166616494224393e-06,
"loss": 0.5006,
"step": 4930
},
{
"epoch": 1.0116731517509727,
"grad_norm": 3.625,
"learning_rate": 5.150042992023166e-06,
"loss": 0.5103,
"step": 4940
},
{
"epoch": 1.0137210731107926,
"grad_norm": 3.609375,
"learning_rate": 5.133467839608998e-06,
"loss": 0.5185,
"step": 4950
},
{
"epoch": 1.0157689944706123,
"grad_norm": 3.140625,
"learning_rate": 5.116891219279846e-06,
"loss": 0.517,
"step": 4960
},
{
"epoch": 1.0178169158304322,
"grad_norm": 4.25,
"learning_rate": 5.1003133133498115e-06,
"loss": 0.5011,
"step": 4970
},
{
"epoch": 1.019864837190252,
"grad_norm": 3.578125,
"learning_rate": 5.083734304147139e-06,
"loss": 0.5249,
"step": 4980
},
{
"epoch": 1.0219127585500716,
"grad_norm": 3.875,
"learning_rate": 5.067154374012201e-06,
"loss": 0.5371,
"step": 4990
},
{
"epoch": 1.0239606799098915,
"grad_norm": 3.21875,
"learning_rate": 5.050573705295504e-06,
"loss": 0.5436,
"step": 5000
},
{
"epoch": 1.0239606799098915,
"eval_loss": 0.5540497303009033,
"eval_runtime": 48.9146,
"eval_samples_per_second": 84.065,
"eval_steps_per_second": 42.032,
"step": 5000
},
{
"epoch": 1.0260086012697112,
"grad_norm": 3.75,
"learning_rate": 5.033992480355675e-06,
"loss": 0.5154,
"step": 5010
},
{
"epoch": 1.028056522629531,
"grad_norm": 3.4375,
"learning_rate": 5.01741088155746e-06,
"loss": 0.4631,
"step": 5020
},
{
"epoch": 1.0301044439893507,
"grad_norm": 3.75,
"learning_rate": 5.000829091269713e-06,
"loss": 0.4978,
"step": 5030
},
{
"epoch": 1.0321523653491707,
"grad_norm": 3.828125,
"learning_rate": 4.984247291863399e-06,
"loss": 0.5375,
"step": 5040
},
{
"epoch": 1.0342002867089903,
"grad_norm": 3.796875,
"learning_rate": 4.967665665709582e-06,
"loss": 0.4977,
"step": 5050
},
{
"epoch": 1.0362482080688102,
"grad_norm": 3.390625,
"learning_rate": 4.951084395177416e-06,
"loss": 0.4551,
"step": 5060
},
{
"epoch": 1.03829612942863,
"grad_norm": 4.125,
"learning_rate": 4.93450366263215e-06,
"loss": 0.4873,
"step": 5070
},
{
"epoch": 1.0403440507884498,
"grad_norm": 3.90625,
"learning_rate": 4.917923650433116e-06,
"loss": 0.4607,
"step": 5080
},
{
"epoch": 1.0423919721482695,
"grad_norm": 4.34375,
"learning_rate": 4.901344540931715e-06,
"loss": 0.5109,
"step": 5090
},
{
"epoch": 1.0444398935080892,
"grad_norm": 4.25,
"learning_rate": 4.884766516469433e-06,
"loss": 0.4653,
"step": 5100
},
{
"epoch": 1.046487814867909,
"grad_norm": 3.625,
"learning_rate": 4.868189759375807e-06,
"loss": 0.5088,
"step": 5110
},
{
"epoch": 1.0485357362277288,
"grad_norm": 3.921875,
"learning_rate": 4.851614451966451e-06,
"loss": 0.498,
"step": 5120
},
{
"epoch": 1.0505836575875487,
"grad_norm": 3.53125,
"learning_rate": 4.835040776541022e-06,
"loss": 0.5035,
"step": 5130
},
{
"epoch": 1.0526315789473684,
"grad_norm": 3.3125,
"learning_rate": 4.818468915381236e-06,
"loss": 0.4899,
"step": 5140
},
{
"epoch": 1.0546795003071883,
"grad_norm": 3.734375,
"learning_rate": 4.801899050748854e-06,
"loss": 0.524,
"step": 5150
},
{
"epoch": 1.056727421667008,
"grad_norm": 4.125,
"learning_rate": 4.785331364883673e-06,
"loss": 0.5365,
"step": 5160
},
{
"epoch": 1.0587753430268279,
"grad_norm": 3.375,
"learning_rate": 4.768766040001536e-06,
"loss": 0.5169,
"step": 5170
},
{
"epoch": 1.0608232643866475,
"grad_norm": 4.78125,
"learning_rate": 4.752203258292312e-06,
"loss": 0.5385,
"step": 5180
},
{
"epoch": 1.0628711857464674,
"grad_norm": 3.234375,
"learning_rate": 4.735643201917903e-06,
"loss": 0.4745,
"step": 5190
},
{
"epoch": 1.0649191071062871,
"grad_norm": 3.84375,
"learning_rate": 4.7190860530102385e-06,
"loss": 0.5175,
"step": 5200
},
{
"epoch": 1.0669670284661068,
"grad_norm": 3.9375,
"learning_rate": 4.702531993669265e-06,
"loss": 0.5022,
"step": 5210
},
{
"epoch": 1.0690149498259267,
"grad_norm": 3.921875,
"learning_rate": 4.685981205960958e-06,
"loss": 0.4885,
"step": 5220
},
{
"epoch": 1.0710628711857464,
"grad_norm": 3.703125,
"learning_rate": 4.669433871915301e-06,
"loss": 0.5109,
"step": 5230
},
{
"epoch": 1.0731107925455663,
"grad_norm": 3.78125,
"learning_rate": 4.6528901735243e-06,
"loss": 0.4801,
"step": 5240
},
{
"epoch": 1.075158713905386,
"grad_norm": 4.125,
"learning_rate": 4.636350292739974e-06,
"loss": 0.5051,
"step": 5250
},
{
"epoch": 1.0772066352652059,
"grad_norm": 3.71875,
"learning_rate": 4.6198144114723506e-06,
"loss": 0.488,
"step": 5260
},
{
"epoch": 1.0792545566250256,
"grad_norm": 3.875,
"learning_rate": 4.603282711587478e-06,
"loss": 0.5006,
"step": 5270
},
{
"epoch": 1.0813024779848455,
"grad_norm": 4.25,
"learning_rate": 4.586755374905405e-06,
"loss": 0.4973,
"step": 5280
},
{
"epoch": 1.0833503993446651,
"grad_norm": 3.8125,
"learning_rate": 4.570232583198206e-06,
"loss": 0.5142,
"step": 5290
},
{
"epoch": 1.0853983207044848,
"grad_norm": 3.625,
"learning_rate": 4.553714518187955e-06,
"loss": 0.4319,
"step": 5300
},
{
"epoch": 1.0874462420643047,
"grad_norm": 4.03125,
"learning_rate": 4.537201361544751e-06,
"loss": 0.49,
"step": 5310
},
{
"epoch": 1.0894941634241244,
"grad_norm": 3.609375,
"learning_rate": 4.5206932948847075e-06,
"loss": 0.5164,
"step": 5320
},
{
"epoch": 1.0915420847839443,
"grad_norm": 4.09375,
"learning_rate": 4.50419049976795e-06,
"loss": 0.5184,
"step": 5330
},
{
"epoch": 1.093590006143764,
"grad_norm": 3.4375,
"learning_rate": 4.487693157696637e-06,
"loss": 0.4799,
"step": 5340
},
{
"epoch": 1.095637927503584,
"grad_norm": 3.625,
"learning_rate": 4.471201450112942e-06,
"loss": 0.4984,
"step": 5350
},
{
"epoch": 1.0976858488634036,
"grad_norm": 4.21875,
"learning_rate": 4.454715558397076e-06,
"loss": 0.5458,
"step": 5360
},
{
"epoch": 1.0997337702232235,
"grad_norm": 3.5,
"learning_rate": 4.438235663865288e-06,
"loss": 0.4502,
"step": 5370
},
{
"epoch": 1.1017816915830432,
"grad_norm": 3.125,
"learning_rate": 4.421761947767856e-06,
"loss": 0.4981,
"step": 5380
},
{
"epoch": 1.103829612942863,
"grad_norm": 3.40625,
"learning_rate": 4.405294591287122e-06,
"loss": 0.4796,
"step": 5390
},
{
"epoch": 1.1058775343026828,
"grad_norm": 3.359375,
"learning_rate": 4.388833775535469e-06,
"loss": 0.4936,
"step": 5400
},
{
"epoch": 1.1079254556625027,
"grad_norm": 3.265625,
"learning_rate": 4.372379681553352e-06,
"loss": 0.4897,
"step": 5410
},
{
"epoch": 1.1099733770223223,
"grad_norm": 3.328125,
"learning_rate": 4.3559324903072985e-06,
"loss": 0.506,
"step": 5420
},
{
"epoch": 1.112021298382142,
"grad_norm": 3.65625,
"learning_rate": 4.3394923826879096e-06,
"loss": 0.5046,
"step": 5430
},
{
"epoch": 1.114069219741962,
"grad_norm": 3.734375,
"learning_rate": 4.32305953950789e-06,
"loss": 0.5027,
"step": 5440
},
{
"epoch": 1.1161171411017816,
"grad_norm": 3.671875,
"learning_rate": 4.306634141500037e-06,
"loss": 0.5006,
"step": 5450
},
{
"epoch": 1.1181650624616015,
"grad_norm": 4.15625,
"learning_rate": 4.290216369315274e-06,
"loss": 0.5329,
"step": 5460
},
{
"epoch": 1.1202129838214212,
"grad_norm": 4.1875,
"learning_rate": 4.273806403520644e-06,
"loss": 0.4763,
"step": 5470
},
{
"epoch": 1.122260905181241,
"grad_norm": 3.6875,
"learning_rate": 4.257404424597342e-06,
"loss": 0.5092,
"step": 5480
},
{
"epoch": 1.1243088265410608,
"grad_norm": 3.03125,
"learning_rate": 4.241010612938719e-06,
"loss": 0.4719,
"step": 5490
},
{
"epoch": 1.1263567479008807,
"grad_norm": 3.21875,
"learning_rate": 4.224625148848292e-06,
"loss": 0.4901,
"step": 5500
},
{
"epoch": 1.1284046692607004,
"grad_norm": 3.296875,
"learning_rate": 4.208248212537783e-06,
"loss": 0.4323,
"step": 5510
},
{
"epoch": 1.13045259062052,
"grad_norm": 4.40625,
"learning_rate": 4.19187998412511e-06,
"loss": 0.4783,
"step": 5520
},
{
"epoch": 1.13250051198034,
"grad_norm": 3.65625,
"learning_rate": 4.175520643632428e-06,
"loss": 0.5215,
"step": 5530
},
{
"epoch": 1.1345484333401596,
"grad_norm": 5.09375,
"learning_rate": 4.15917037098414e-06,
"loss": 0.4825,
"step": 5540
},
{
"epoch": 1.1365963546999795,
"grad_norm": 4.0625,
"learning_rate": 4.142829346004911e-06,
"loss": 0.4754,
"step": 5550
},
{
"epoch": 1.1386442760597992,
"grad_norm": 3.96875,
"learning_rate": 4.126497748417708e-06,
"loss": 0.4967,
"step": 5560
},
{
"epoch": 1.1406921974196191,
"grad_norm": 3.4375,
"learning_rate": 4.110175757841802e-06,
"loss": 0.5174,
"step": 5570
},
{
"epoch": 1.1427401187794388,
"grad_norm": 4.25,
"learning_rate": 4.093863553790813e-06,
"loss": 0.5289,
"step": 5580
},
{
"epoch": 1.1447880401392587,
"grad_norm": 4.09375,
"learning_rate": 4.077561315670721e-06,
"loss": 0.5478,
"step": 5590
},
{
"epoch": 1.1468359614990784,
"grad_norm": 3.59375,
"learning_rate": 4.061269222777898e-06,
"loss": 0.4968,
"step": 5600
},
{
"epoch": 1.1488838828588983,
"grad_norm": 4.3125,
"learning_rate": 4.044987454297142e-06,
"loss": 0.4841,
"step": 5610
},
{
"epoch": 1.150931804218718,
"grad_norm": 3.625,
"learning_rate": 4.028716189299691e-06,
"loss": 0.4725,
"step": 5620
},
{
"epoch": 1.152979725578538,
"grad_norm": 4.4375,
"learning_rate": 4.01245560674127e-06,
"loss": 0.5094,
"step": 5630
},
{
"epoch": 1.1550276469383576,
"grad_norm": 3.265625,
"learning_rate": 3.996205885460112e-06,
"loss": 0.5184,
"step": 5640
},
{
"epoch": 1.1570755682981773,
"grad_norm": 3.9375,
"learning_rate": 3.979967204174998e-06,
"loss": 0.4978,
"step": 5650
},
{
"epoch": 1.1591234896579972,
"grad_norm": 4.09375,
"learning_rate": 3.963739741483285e-06,
"loss": 0.4841,
"step": 5660
},
{
"epoch": 1.1611714110178168,
"grad_norm": 3.9375,
"learning_rate": 3.947523675858945e-06,
"loss": 0.5245,
"step": 5670
},
{
"epoch": 1.1632193323776367,
"grad_norm": 3.703125,
"learning_rate": 3.931319185650606e-06,
"loss": 0.4784,
"step": 5680
},
{
"epoch": 1.1652672537374564,
"grad_norm": 3.703125,
"learning_rate": 3.915126449079582e-06,
"loss": 0.5205,
"step": 5690
},
{
"epoch": 1.1673151750972763,
"grad_norm": 4.6875,
"learning_rate": 3.898945644237919e-06,
"loss": 0.5475,
"step": 5700
},
{
"epoch": 1.169363096457096,
"grad_norm": 3.96875,
"learning_rate": 3.882776949086436e-06,
"loss": 0.5308,
"step": 5710
},
{
"epoch": 1.171411017816916,
"grad_norm": 4.28125,
"learning_rate": 3.866620541452764e-06,
"loss": 0.516,
"step": 5720
},
{
"epoch": 1.1734589391767356,
"grad_norm": 3.125,
"learning_rate": 3.850476599029394e-06,
"loss": 0.4846,
"step": 5730
},
{
"epoch": 1.1755068605365553,
"grad_norm": 4.0625,
"learning_rate": 3.834345299371719e-06,
"loss": 0.481,
"step": 5740
},
{
"epoch": 1.1775547818963752,
"grad_norm": 4.0,
"learning_rate": 3.818226819896089e-06,
"loss": 0.4901,
"step": 5750
},
{
"epoch": 1.1796027032561949,
"grad_norm": 3.953125,
"learning_rate": 3.802121337877848e-06,
"loss": 0.5132,
"step": 5760
},
{
"epoch": 1.1816506246160148,
"grad_norm": 4.15625,
"learning_rate": 3.7860290304493953e-06,
"loss": 0.4929,
"step": 5770
},
{
"epoch": 1.1836985459758345,
"grad_norm": 4.84375,
"learning_rate": 3.7699500745982287e-06,
"loss": 0.5142,
"step": 5780
},
{
"epoch": 1.1857464673356544,
"grad_norm": 3.5,
"learning_rate": 3.7538846471650038e-06,
"loss": 0.5093,
"step": 5790
},
{
"epoch": 1.187794388695474,
"grad_norm": 2.859375,
"learning_rate": 3.737832924841587e-06,
"loss": 0.4884,
"step": 5800
},
{
"epoch": 1.189842310055294,
"grad_norm": 3.625,
"learning_rate": 3.72179508416911e-06,
"loss": 0.4842,
"step": 5810
},
{
"epoch": 1.1918902314151136,
"grad_norm": 4.09375,
"learning_rate": 3.7057713015360365e-06,
"loss": 0.505,
"step": 5820
},
{
"epoch": 1.1939381527749335,
"grad_norm": 4.0625,
"learning_rate": 3.6897617531762086e-06,
"loss": 0.5513,
"step": 5830
},
{
"epoch": 1.1959860741347532,
"grad_norm": 3.46875,
"learning_rate": 3.6737666151669206e-06,
"loss": 0.5029,
"step": 5840
},
{
"epoch": 1.1980339954945731,
"grad_norm": 4.53125,
"learning_rate": 3.657786063426977e-06,
"loss": 0.4956,
"step": 5850
},
{
"epoch": 1.2000819168543928,
"grad_norm": 3.1875,
"learning_rate": 3.6418202737147566e-06,
"loss": 0.4914,
"step": 5860
},
{
"epoch": 1.2021298382142125,
"grad_norm": 3.671875,
"learning_rate": 3.6258694216262845e-06,
"loss": 0.5057,
"step": 5870
},
{
"epoch": 1.2041777595740324,
"grad_norm": 3.796875,
"learning_rate": 3.609933682593299e-06,
"loss": 0.4909,
"step": 5880
},
{
"epoch": 1.206225680933852,
"grad_norm": 3.609375,
"learning_rate": 3.5940132318813137e-06,
"loss": 0.5094,
"step": 5890
},
{
"epoch": 1.208273602293672,
"grad_norm": 3.328125,
"learning_rate": 3.578108244587705e-06,
"loss": 0.5162,
"step": 5900
},
{
"epoch": 1.2103215236534917,
"grad_norm": 3.625,
"learning_rate": 3.562218895639775e-06,
"loss": 0.5171,
"step": 5910
},
{
"epoch": 1.2123694450133116,
"grad_norm": 3.875,
"learning_rate": 3.5463453597928306e-06,
"loss": 0.5411,
"step": 5920
},
{
"epoch": 1.2144173663731312,
"grad_norm": 3.96875,
"learning_rate": 3.5304878116282654e-06,
"loss": 0.4972,
"step": 5930
},
{
"epoch": 1.2164652877329512,
"grad_norm": 4.375,
"learning_rate": 3.51464642555163e-06,
"loss": 0.5189,
"step": 5940
},
{
"epoch": 1.2185132090927708,
"grad_norm": 3.78125,
"learning_rate": 3.498821375790728e-06,
"loss": 0.5105,
"step": 5950
},
{
"epoch": 1.2205611304525905,
"grad_norm": 4.59375,
"learning_rate": 3.4830128363936835e-06,
"loss": 0.4979,
"step": 5960
},
{
"epoch": 1.2226090518124104,
"grad_norm": 4.65625,
"learning_rate": 3.467220981227042e-06,
"loss": 0.5311,
"step": 5970
},
{
"epoch": 1.22465697317223,
"grad_norm": 3.359375,
"learning_rate": 3.451445983973848e-06,
"loss": 0.4564,
"step": 5980
},
{
"epoch": 1.22670489453205,
"grad_norm": 4.125,
"learning_rate": 3.4356880181317377e-06,
"loss": 0.5113,
"step": 5990
},
{
"epoch": 1.2287528158918697,
"grad_norm": 4.1875,
"learning_rate": 3.419947257011036e-06,
"loss": 0.5245,
"step": 6000
},
{
"epoch": 1.2287528158918697,
"eval_loss": 0.5531573295593262,
"eval_runtime": 48.5342,
"eval_samples_per_second": 84.724,
"eval_steps_per_second": 42.362,
"step": 6000
},
{
"epoch": 1.2308007372516896,
"grad_norm": 4.34375,
"learning_rate": 3.404223873732837e-06,
"loss": 0.5338,
"step": 6010
},
{
"epoch": 1.2328486586115093,
"grad_norm": 3.90625,
"learning_rate": 3.3885180412271183e-06,
"loss": 0.5234,
"step": 6020
},
{
"epoch": 1.2348965799713292,
"grad_norm": 3.328125,
"learning_rate": 3.372829932230821e-06,
"loss": 0.5337,
"step": 6030
},
{
"epoch": 1.2369445013311489,
"grad_norm": 3.96875,
"learning_rate": 3.357159719285964e-06,
"loss": 0.5321,
"step": 6040
},
{
"epoch": 1.2389924226909685,
"grad_norm": 4.15625,
"learning_rate": 3.3415075747377414e-06,
"loss": 0.5118,
"step": 6050
},
{
"epoch": 1.2410403440507884,
"grad_norm": 3.453125,
"learning_rate": 3.325873670732619e-06,
"loss": 0.5142,
"step": 6060
},
{
"epoch": 1.2430882654106084,
"grad_norm": 4.1875,
"learning_rate": 3.3102581792164566e-06,
"loss": 0.4996,
"step": 6070
},
{
"epoch": 1.245136186770428,
"grad_norm": 4.9375,
"learning_rate": 3.294661271932601e-06,
"loss": 0.5165,
"step": 6080
},
{
"epoch": 1.2471841081302477,
"grad_norm": 4.5,
"learning_rate": 3.2790831204200113e-06,
"loss": 0.5101,
"step": 6090
},
{
"epoch": 1.2492320294900676,
"grad_norm": 3.21875,
"learning_rate": 3.2635238960113634e-06,
"loss": 0.5147,
"step": 6100
},
{
"epoch": 1.2512799508498873,
"grad_norm": 3.75,
"learning_rate": 3.2479837698311646e-06,
"loss": 0.5224,
"step": 6110
},
{
"epoch": 1.2533278722097072,
"grad_norm": 3.375,
"learning_rate": 3.23246291279388e-06,
"loss": 0.4954,
"step": 6120
},
{
"epoch": 1.2553757935695269,
"grad_norm": 4.3125,
"learning_rate": 3.2169614956020423e-06,
"loss": 0.4957,
"step": 6130
},
{
"epoch": 1.2574237149293468,
"grad_norm": 3.390625,
"learning_rate": 3.2014796887443854e-06,
"loss": 0.4907,
"step": 6140
},
{
"epoch": 1.2594716362891665,
"grad_norm": 3.46875,
"learning_rate": 3.1860176624939566e-06,
"loss": 0.4758,
"step": 6150
},
{
"epoch": 1.2615195576489864,
"grad_norm": 3.65625,
"learning_rate": 3.1705755869062553e-06,
"loss": 0.4639,
"step": 6160
},
{
"epoch": 1.263567479008806,
"grad_norm": 3.6875,
"learning_rate": 3.1551536318173613e-06,
"loss": 0.5144,
"step": 6170
},
{
"epoch": 1.2656154003686257,
"grad_norm": 4.25,
"learning_rate": 3.139751966842054e-06,
"loss": 0.5526,
"step": 6180
},
{
"epoch": 1.2676633217284456,
"grad_norm": 4.625,
"learning_rate": 3.124370761371968e-06,
"loss": 0.4946,
"step": 6190
},
{
"epoch": 1.2697112430882653,
"grad_norm": 3.984375,
"learning_rate": 3.1090101845737084e-06,
"loss": 0.4808,
"step": 6200
},
{
"epoch": 1.2717591644480852,
"grad_norm": 3.875,
"learning_rate": 3.0936704053870083e-06,
"loss": 0.5139,
"step": 6210
},
{
"epoch": 1.273807085807905,
"grad_norm": 3.625,
"learning_rate": 3.0783515925228626e-06,
"loss": 0.4892,
"step": 6220
},
{
"epoch": 1.2758550071677248,
"grad_norm": 4.53125,
"learning_rate": 3.0630539144616646e-06,
"loss": 0.4363,
"step": 6230
},
{
"epoch": 1.2779029285275445,
"grad_norm": 3.828125,
"learning_rate": 3.0477775394513743e-06,
"loss": 0.4816,
"step": 6240
},
{
"epoch": 1.2799508498873644,
"grad_norm": 4.09375,
"learning_rate": 3.0325226355056425e-06,
"loss": 0.5106,
"step": 6250
},
{
"epoch": 1.281998771247184,
"grad_norm": 3.9375,
"learning_rate": 3.0172893704019846e-06,
"loss": 0.4663,
"step": 6260
},
{
"epoch": 1.2840466926070038,
"grad_norm": 3.90625,
"learning_rate": 3.0020779116799236e-06,
"loss": 0.5006,
"step": 6270
},
{
"epoch": 1.2860946139668237,
"grad_norm": 3.28125,
"learning_rate": 2.9868884266391464e-06,
"loss": 0.511,
"step": 6280
},
{
"epoch": 1.2881425353266436,
"grad_norm": 4.65625,
"learning_rate": 2.9717210823376742e-06,
"loss": 0.4877,
"step": 6290
},
{
"epoch": 1.2901904566864633,
"grad_norm": 3.90625,
"learning_rate": 2.9565760455900106e-06,
"loss": 0.4974,
"step": 6300
},
{
"epoch": 1.292238378046283,
"grad_norm": 3.09375,
"learning_rate": 2.941453482965323e-06,
"loss": 0.5354,
"step": 6310
},
{
"epoch": 1.2942862994061028,
"grad_norm": 4.125,
"learning_rate": 2.926353560785594e-06,
"loss": 0.5125,
"step": 6320
},
{
"epoch": 1.2963342207659225,
"grad_norm": 3.828125,
"learning_rate": 2.9112764451238074e-06,
"loss": 0.5341,
"step": 6330
},
{
"epoch": 1.2983821421257424,
"grad_norm": 3.609375,
"learning_rate": 2.8962223018021116e-06,
"loss": 0.4633,
"step": 6340
},
{
"epoch": 1.3004300634855621,
"grad_norm": 3.828125,
"learning_rate": 2.8811912963900013e-06,
"loss": 0.4744,
"step": 6350
},
{
"epoch": 1.302477984845382,
"grad_norm": 3.9375,
"learning_rate": 2.8661835942024915e-06,
"loss": 0.5308,
"step": 6360
},
{
"epoch": 1.3045259062052017,
"grad_norm": 3.65625,
"learning_rate": 2.8511993602983023e-06,
"loss": 0.5155,
"step": 6370
},
{
"epoch": 1.3065738275650216,
"grad_norm": 3.578125,
"learning_rate": 2.836238759478045e-06,
"loss": 0.4424,
"step": 6380
},
{
"epoch": 1.3086217489248413,
"grad_norm": 3.984375,
"learning_rate": 2.821301956282408e-06,
"loss": 0.5507,
"step": 6390
},
{
"epoch": 1.310669670284661,
"grad_norm": 3.234375,
"learning_rate": 2.806389114990345e-06,
"loss": 0.5209,
"step": 6400
},
{
"epoch": 1.3127175916444809,
"grad_norm": 4.0625,
"learning_rate": 2.7915003996172724e-06,
"loss": 0.5002,
"step": 6410
},
{
"epoch": 1.3147655130043006,
"grad_norm": 3.46875,
"learning_rate": 2.776635973913262e-06,
"loss": 0.4994,
"step": 6420
},
{
"epoch": 1.3168134343641205,
"grad_norm": 3.109375,
"learning_rate": 2.761796001361241e-06,
"loss": 0.4868,
"step": 6430
},
{
"epoch": 1.3188613557239401,
"grad_norm": 3.703125,
"learning_rate": 2.746980645175199e-06,
"loss": 0.4633,
"step": 6440
},
{
"epoch": 1.32090927708376,
"grad_norm": 3.9375,
"learning_rate": 2.732190068298378e-06,
"loss": 0.5256,
"step": 6450
},
{
"epoch": 1.3229571984435797,
"grad_norm": 3.75,
"learning_rate": 2.7174244334015025e-06,
"loss": 0.5183,
"step": 6460
},
{
"epoch": 1.3250051198033996,
"grad_norm": 4.15625,
"learning_rate": 2.7026839028809704e-06,
"loss": 0.5399,
"step": 6470
},
{
"epoch": 1.3270530411632193,
"grad_norm": 4.125,
"learning_rate": 2.6879686388570782e-06,
"loss": 0.5054,
"step": 6480
},
{
"epoch": 1.329100962523039,
"grad_norm": 3.984375,
"learning_rate": 2.6732788031722325e-06,
"loss": 0.4731,
"step": 6490
},
{
"epoch": 1.331148883882859,
"grad_norm": 4.59375,
"learning_rate": 2.658614557389174e-06,
"loss": 0.4896,
"step": 6500
},
{
"epoch": 1.3331968052426788,
"grad_norm": 3.796875,
"learning_rate": 2.6439760627892e-06,
"loss": 0.5294,
"step": 6510
},
{
"epoch": 1.3352447266024985,
"grad_norm": 3.421875,
"learning_rate": 2.6293634803703853e-06,
"loss": 0.5028,
"step": 6520
},
{
"epoch": 1.3372926479623182,
"grad_norm": 3.96875,
"learning_rate": 2.61477697084582e-06,
"loss": 0.4852,
"step": 6530
},
{
"epoch": 1.339340569322138,
"grad_norm": 4.34375,
"learning_rate": 2.600216694641835e-06,
"loss": 0.4643,
"step": 6540
},
{
"epoch": 1.3413884906819578,
"grad_norm": 4.15625,
"learning_rate": 2.5856828118962385e-06,
"loss": 0.4863,
"step": 6550
},
{
"epoch": 1.3434364120417777,
"grad_norm": 3.453125,
"learning_rate": 2.571175482456565e-06,
"loss": 0.5265,
"step": 6560
},
{
"epoch": 1.3454843334015973,
"grad_norm": 3.453125,
"learning_rate": 2.5566948658782947e-06,
"loss": 0.512,
"step": 6570
},
{
"epoch": 1.347532254761417,
"grad_norm": 3.65625,
"learning_rate": 2.5422411214231272e-06,
"loss": 0.453,
"step": 6580
},
{
"epoch": 1.349580176121237,
"grad_norm": 4.25,
"learning_rate": 2.5278144080572013e-06,
"loss": 0.4624,
"step": 6590
},
{
"epoch": 1.3516280974810568,
"grad_norm": 3.84375,
"learning_rate": 2.513414884449373e-06,
"loss": 0.5362,
"step": 6600
},
{
"epoch": 1.3536760188408765,
"grad_norm": 3.6875,
"learning_rate": 2.4990427089694517e-06,
"loss": 0.466,
"step": 6610
},
{
"epoch": 1.3557239402006962,
"grad_norm": 3.828125,
"learning_rate": 2.484698039686465e-06,
"loss": 0.513,
"step": 6620
},
{
"epoch": 1.357771861560516,
"grad_norm": 4.0625,
"learning_rate": 2.4703810343669204e-06,
"loss": 0.4675,
"step": 6630
},
{
"epoch": 1.3598197829203358,
"grad_norm": 4.28125,
"learning_rate": 2.4560918504730712e-06,
"loss": 0.461,
"step": 6640
},
{
"epoch": 1.3618677042801557,
"grad_norm": 3.6875,
"learning_rate": 2.4418306451611816e-06,
"loss": 0.4595,
"step": 6650
},
{
"epoch": 1.3639156256399754,
"grad_norm": 3.875,
"learning_rate": 2.427597575279801e-06,
"loss": 0.5385,
"step": 6660
},
{
"epoch": 1.3659635469997953,
"grad_norm": 4.03125,
"learning_rate": 2.413392797368034e-06,
"loss": 0.4778,
"step": 6670
},
{
"epoch": 1.368011468359615,
"grad_norm": 4.9375,
"learning_rate": 2.3992164676538336e-06,
"loss": 0.5586,
"step": 6680
},
{
"epoch": 1.3700593897194349,
"grad_norm": 3.671875,
"learning_rate": 2.385068742052255e-06,
"loss": 0.4678,
"step": 6690
},
{
"epoch": 1.3721073110792545,
"grad_norm": 3.921875,
"learning_rate": 2.3709497761637744e-06,
"loss": 0.5287,
"step": 6700
},
{
"epoch": 1.3741552324390742,
"grad_norm": 4.59375,
"learning_rate": 2.3568597252725466e-06,
"loss": 0.5866,
"step": 6710
},
{
"epoch": 1.3762031537988941,
"grad_norm": 3.75,
"learning_rate": 2.3427987443447237e-06,
"loss": 0.5044,
"step": 6720
},
{
"epoch": 1.378251075158714,
"grad_norm": 4.15625,
"learning_rate": 2.3287669880267317e-06,
"loss": 0.5611,
"step": 6730
},
{
"epoch": 1.3802989965185337,
"grad_norm": 3.65625,
"learning_rate": 2.3147646106435773e-06,
"loss": 0.4826,
"step": 6740
},
{
"epoch": 1.3823469178783534,
"grad_norm": 3.5,
"learning_rate": 2.300791766197151e-06,
"loss": 0.4959,
"step": 6750
},
{
"epoch": 1.3843948392381733,
"grad_norm": 3.84375,
"learning_rate": 2.2868486083645325e-06,
"loss": 0.4908,
"step": 6760
},
{
"epoch": 1.386442760597993,
"grad_norm": 3.75,
"learning_rate": 2.272935290496297e-06,
"loss": 0.5306,
"step": 6770
},
{
"epoch": 1.3884906819578129,
"grad_norm": 4.0625,
"learning_rate": 2.259051965614839e-06,
"loss": 0.5183,
"step": 6780
},
{
"epoch": 1.3905386033176326,
"grad_norm": 4.25,
"learning_rate": 2.2451987864126712e-06,
"loss": 0.5608,
"step": 6790
},
{
"epoch": 1.3925865246774523,
"grad_norm": 4.40625,
"learning_rate": 2.231375905250769e-06,
"loss": 0.5404,
"step": 6800
},
{
"epoch": 1.3946344460372722,
"grad_norm": 4.375,
"learning_rate": 2.2175834741568677e-06,
"loss": 0.5083,
"step": 6810
},
{
"epoch": 1.396682367397092,
"grad_norm": 3.265625,
"learning_rate": 2.2038216448238185e-06,
"loss": 0.4536,
"step": 6820
},
{
"epoch": 1.3987302887569117,
"grad_norm": 3.328125,
"learning_rate": 2.1900905686078945e-06,
"loss": 0.4988,
"step": 6830
},
{
"epoch": 1.4007782101167314,
"grad_norm": 3.421875,
"learning_rate": 2.1763903965271466e-06,
"loss": 0.5205,
"step": 6840
},
{
"epoch": 1.4028261314765513,
"grad_norm": 4.3125,
"learning_rate": 2.162721279259729e-06,
"loss": 0.5292,
"step": 6850
},
{
"epoch": 1.404874052836371,
"grad_norm": 3.765625,
"learning_rate": 2.1490833671422484e-06,
"loss": 0.484,
"step": 6860
},
{
"epoch": 1.406921974196191,
"grad_norm": 3.8125,
"learning_rate": 2.1354768101681077e-06,
"loss": 0.462,
"step": 6870
},
{
"epoch": 1.4089698955560106,
"grad_norm": 3.75,
"learning_rate": 2.1219017579858586e-06,
"loss": 0.5473,
"step": 6880
},
{
"epoch": 1.4110178169158305,
"grad_norm": 4.25,
"learning_rate": 2.108358359897553e-06,
"loss": 0.4682,
"step": 6890
},
{
"epoch": 1.4130657382756502,
"grad_norm": 3.765625,
"learning_rate": 2.0948467648571085e-06,
"loss": 0.516,
"step": 6900
},
{
"epoch": 1.41511365963547,
"grad_norm": 4.375,
"learning_rate": 2.0813671214686533e-06,
"loss": 0.5238,
"step": 6910
},
{
"epoch": 1.4171615809952898,
"grad_norm": 4.15625,
"learning_rate": 2.067919577984916e-06,
"loss": 0.4904,
"step": 6920
},
{
"epoch": 1.4192095023551095,
"grad_norm": 4.375,
"learning_rate": 2.054504282305569e-06,
"loss": 0.543,
"step": 6930
},
{
"epoch": 1.4212574237149294,
"grad_norm": 3.78125,
"learning_rate": 2.041121381975624e-06,
"loss": 0.4894,
"step": 6940
},
{
"epoch": 1.4233053450747493,
"grad_norm": 3.25,
"learning_rate": 2.027771024183798e-06,
"loss": 0.4765,
"step": 6950
},
{
"epoch": 1.425353266434569,
"grad_norm": 3.28125,
"learning_rate": 2.0144533557608925e-06,
"loss": 0.4883,
"step": 6960
},
{
"epoch": 1.4274011877943886,
"grad_norm": 3.609375,
"learning_rate": 2.0011685231781876e-06,
"loss": 0.5183,
"step": 6970
},
{
"epoch": 1.4294491091542085,
"grad_norm": 3.9375,
"learning_rate": 1.987916672545822e-06,
"loss": 0.5087,
"step": 6980
},
{
"epoch": 1.4314970305140282,
"grad_norm": 3.984375,
"learning_rate": 1.9746979496111936e-06,
"loss": 0.4799,
"step": 6990
},
{
"epoch": 1.4335449518738481,
"grad_norm": 4.78125,
"learning_rate": 1.96151249975735e-06,
"loss": 0.4663,
"step": 7000
},
{
"epoch": 1.4335449518738481,
"eval_loss": 0.5521626472473145,
"eval_runtime": 50.1391,
"eval_samples_per_second": 82.012,
"eval_steps_per_second": 41.006,
"step": 7000
},
{
"epoch": 1.4355928732336678,
"grad_norm": 4.03125,
"learning_rate": 1.948360468001393e-06,
"loss": 0.5628,
"step": 7010
},
{
"epoch": 1.4376407945934875,
"grad_norm": 4.375,
"learning_rate": 1.935241998992889e-06,
"loss": 0.4884,
"step": 7020
},
{
"epoch": 1.4396887159533074,
"grad_norm": 4.1875,
"learning_rate": 1.9221572370122606e-06,
"loss": 0.5088,
"step": 7030
},
{
"epoch": 1.4417366373131273,
"grad_norm": 3.90625,
"learning_rate": 1.9091063259692255e-06,
"loss": 0.4645,
"step": 7040
},
{
"epoch": 1.443784558672947,
"grad_norm": 4.0625,
"learning_rate": 1.896089409401185e-06,
"loss": 0.5207,
"step": 7050
},
{
"epoch": 1.4458324800327667,
"grad_norm": 5.15625,
"learning_rate": 1.8831066304716738e-06,
"loss": 0.5466,
"step": 7060
},
{
"epoch": 1.4478804013925866,
"grad_norm": 4.125,
"learning_rate": 1.8701581319687634e-06,
"loss": 0.5313,
"step": 7070
},
{
"epoch": 1.4499283227524062,
"grad_norm": 4.1875,
"learning_rate": 1.8572440563035016e-06,
"loss": 0.523,
"step": 7080
},
{
"epoch": 1.4519762441122261,
"grad_norm": 3.9375,
"learning_rate": 1.8443645455083465e-06,
"loss": 0.4837,
"step": 7090
},
{
"epoch": 1.4540241654720458,
"grad_norm": 3.765625,
"learning_rate": 1.8315197412356006e-06,
"loss": 0.4952,
"step": 7100
},
{
"epoch": 1.4560720868318657,
"grad_norm": 3.515625,
"learning_rate": 1.8187097847558532e-06,
"loss": 0.4919,
"step": 7110
},
{
"epoch": 1.4581200081916854,
"grad_norm": 4.09375,
"learning_rate": 1.8059348169564366e-06,
"loss": 0.4318,
"step": 7120
},
{
"epoch": 1.4601679295515053,
"grad_norm": 3.828125,
"learning_rate": 1.793194978339855e-06,
"loss": 0.4651,
"step": 7130
},
{
"epoch": 1.462215850911325,
"grad_norm": 3.5625,
"learning_rate": 1.7804904090222664e-06,
"loss": 0.4786,
"step": 7140
},
{
"epoch": 1.4642637722711447,
"grad_norm": 3.890625,
"learning_rate": 1.767821248731913e-06,
"loss": 0.4668,
"step": 7150
},
{
"epoch": 1.4663116936309646,
"grad_norm": 4.125,
"learning_rate": 1.7551876368076154e-06,
"loss": 0.5105,
"step": 7160
},
{
"epoch": 1.4683596149907845,
"grad_norm": 4.40625,
"learning_rate": 1.7425897121972068e-06,
"loss": 0.501,
"step": 7170
},
{
"epoch": 1.4704075363506042,
"grad_norm": 3.09375,
"learning_rate": 1.7300276134560367e-06,
"loss": 0.5251,
"step": 7180
},
{
"epoch": 1.4724554577104239,
"grad_norm": 4.15625,
"learning_rate": 1.717501478745423e-06,
"loss": 0.5332,
"step": 7190
},
{
"epoch": 1.4745033790702438,
"grad_norm": 4.28125,
"learning_rate": 1.7050114458311446e-06,
"loss": 0.4896,
"step": 7200
},
{
"epoch": 1.4765513004300634,
"grad_norm": 4.5625,
"learning_rate": 1.6925576520819225e-06,
"loss": 0.5556,
"step": 7210
},
{
"epoch": 1.4785992217898833,
"grad_norm": 4.625,
"learning_rate": 1.6801402344679102e-06,
"loss": 0.5056,
"step": 7220
},
{
"epoch": 1.480647143149703,
"grad_norm": 3.296875,
"learning_rate": 1.6677593295591848e-06,
"loss": 0.5639,
"step": 7230
},
{
"epoch": 1.4826950645095227,
"grad_norm": 4.28125,
"learning_rate": 1.6554150735242535e-06,
"loss": 0.5681,
"step": 7240
},
{
"epoch": 1.4847429858693426,
"grad_norm": 4.4375,
"learning_rate": 1.6431076021285381e-06,
"loss": 0.4984,
"step": 7250
},
{
"epoch": 1.4867909072291625,
"grad_norm": 3.46875,
"learning_rate": 1.6308370507329057e-06,
"loss": 0.453,
"step": 7260
},
{
"epoch": 1.4888388285889822,
"grad_norm": 4.40625,
"learning_rate": 1.6186035542921546e-06,
"loss": 0.5102,
"step": 7270
},
{
"epoch": 1.4908867499488019,
"grad_norm": 4.09375,
"learning_rate": 1.6064072473535546e-06,
"loss": 0.499,
"step": 7280
},
{
"epoch": 1.4929346713086218,
"grad_norm": 4.65625,
"learning_rate": 1.5942482640553486e-06,
"loss": 0.5314,
"step": 7290
},
{
"epoch": 1.4949825926684415,
"grad_norm": 3.84375,
"learning_rate": 1.5821267381252858e-06,
"loss": 0.4847,
"step": 7300
},
{
"epoch": 1.4970305140282614,
"grad_norm": 3.40625,
"learning_rate": 1.570042802879148e-06,
"loss": 0.5141,
"step": 7310
},
{
"epoch": 1.499078435388081,
"grad_norm": 4.46875,
"learning_rate": 1.5579965912192873e-06,
"loss": 0.4914,
"step": 7320
},
{
"epoch": 1.5011263567479007,
"grad_norm": 3.453125,
"learning_rate": 1.5459882356331596e-06,
"loss": 0.5131,
"step": 7330
},
{
"epoch": 1.5031742781077206,
"grad_norm": 3.21875,
"learning_rate": 1.53401786819187e-06,
"loss": 0.4868,
"step": 7340
},
{
"epoch": 1.5052221994675405,
"grad_norm": 3.6875,
"learning_rate": 1.5220856205487183e-06,
"loss": 0.485,
"step": 7350
},
{
"epoch": 1.5072701208273602,
"grad_norm": 3.890625,
"learning_rate": 1.510191623937759e-06,
"loss": 0.4816,
"step": 7360
},
{
"epoch": 1.50931804218718,
"grad_norm": 3.546875,
"learning_rate": 1.498336009172341e-06,
"loss": 0.5506,
"step": 7370
},
{
"epoch": 1.5113659635469998,
"grad_norm": 4.1875,
"learning_rate": 1.4865189066436909e-06,
"loss": 0.4692,
"step": 7380
},
{
"epoch": 1.5134138849068197,
"grad_norm": 4.5,
"learning_rate": 1.4747404463194553e-06,
"loss": 0.5319,
"step": 7390
},
{
"epoch": 1.5154618062666394,
"grad_norm": 3.546875,
"learning_rate": 1.4630007577422949e-06,
"loss": 0.4945,
"step": 7400
},
{
"epoch": 1.517509727626459,
"grad_norm": 3.375,
"learning_rate": 1.451299970028442e-06,
"loss": 0.5442,
"step": 7410
},
{
"epoch": 1.5195576489862788,
"grad_norm": 4.3125,
"learning_rate": 1.4396382118662877e-06,
"loss": 0.4956,
"step": 7420
},
{
"epoch": 1.5216055703460987,
"grad_norm": 4.3125,
"learning_rate": 1.4280156115149667e-06,
"loss": 0.531,
"step": 7430
},
{
"epoch": 1.5236534917059186,
"grad_norm": 3.671875,
"learning_rate": 1.4164322968029442e-06,
"loss": 0.4683,
"step": 7440
},
{
"epoch": 1.5257014130657383,
"grad_norm": 3.5,
"learning_rate": 1.4048883951266135e-06,
"loss": 0.5077,
"step": 7450
},
{
"epoch": 1.527749334425558,
"grad_norm": 3.765625,
"learning_rate": 1.3933840334488903e-06,
"loss": 0.4806,
"step": 7460
},
{
"epoch": 1.5297972557853778,
"grad_norm": 4.71875,
"learning_rate": 1.3819193382978207e-06,
"loss": 0.452,
"step": 7470
},
{
"epoch": 1.5318451771451977,
"grad_norm": 3.703125,
"learning_rate": 1.3704944357651872e-06,
"loss": 0.4668,
"step": 7480
},
{
"epoch": 1.5338930985050174,
"grad_norm": 3.78125,
"learning_rate": 1.3591094515051223e-06,
"loss": 0.4809,
"step": 7490
},
{
"epoch": 1.535941019864837,
"grad_norm": 3.6875,
"learning_rate": 1.347764510732727e-06,
"loss": 0.5493,
"step": 7500
},
{
"epoch": 1.537988941224657,
"grad_norm": 3.375,
"learning_rate": 1.3364597382226908e-06,
"loss": 0.5105,
"step": 7510
},
{
"epoch": 1.540036862584477,
"grad_norm": 3.90625,
"learning_rate": 1.325195258307928e-06,
"loss": 0.5163,
"step": 7520
},
{
"epoch": 1.5420847839442966,
"grad_norm": 3.375,
"learning_rate": 1.313971194878198e-06,
"loss": 0.495,
"step": 7530
},
{
"epoch": 1.5441327053041163,
"grad_norm": 3.890625,
"learning_rate": 1.3027876713787524e-06,
"loss": 0.4762,
"step": 7540
},
{
"epoch": 1.546180626663936,
"grad_norm": 3.78125,
"learning_rate": 1.2916448108089713e-06,
"loss": 0.4488,
"step": 7550
},
{
"epoch": 1.5482285480237559,
"grad_norm": 3.59375,
"learning_rate": 1.2805427357210154e-06,
"loss": 0.4543,
"step": 7560
},
{
"epoch": 1.5502764693835758,
"grad_norm": 3.890625,
"learning_rate": 1.2694815682184747e-06,
"loss": 0.5251,
"step": 7570
},
{
"epoch": 1.5523243907433955,
"grad_norm": 4.34375,
"learning_rate": 1.2584614299550257e-06,
"loss": 0.5251,
"step": 7580
},
{
"epoch": 1.5543723121032151,
"grad_norm": 3.359375,
"learning_rate": 1.2474824421330962e-06,
"loss": 0.4684,
"step": 7590
},
{
"epoch": 1.556420233463035,
"grad_norm": 4.15625,
"learning_rate": 1.2365447255025293e-06,
"loss": 0.4835,
"step": 7600
},
{
"epoch": 1.558468154822855,
"grad_norm": 4.40625,
"learning_rate": 1.2256484003592572e-06,
"loss": 0.4605,
"step": 7610
},
{
"epoch": 1.5605160761826746,
"grad_norm": 4.3125,
"learning_rate": 1.2147935865439747e-06,
"loss": 0.4669,
"step": 7620
},
{
"epoch": 1.5625639975424943,
"grad_norm": 3.703125,
"learning_rate": 1.2039804034408315e-06,
"loss": 0.4901,
"step": 7630
},
{
"epoch": 1.564611918902314,
"grad_norm": 3.484375,
"learning_rate": 1.1932089699760995e-06,
"loss": 0.5078,
"step": 7640
},
{
"epoch": 1.566659840262134,
"grad_norm": 4.28125,
"learning_rate": 1.1824794046168892e-06,
"loss": 0.4499,
"step": 7650
},
{
"epoch": 1.5687077616219538,
"grad_norm": 4.71875,
"learning_rate": 1.1717918253698273e-06,
"loss": 0.5427,
"step": 7660
},
{
"epoch": 1.5707556829817735,
"grad_norm": 3.296875,
"learning_rate": 1.1611463497797676e-06,
"loss": 0.4713,
"step": 7670
},
{
"epoch": 1.5728036043415932,
"grad_norm": 4.15625,
"learning_rate": 1.1505430949284962e-06,
"loss": 0.511,
"step": 7680
},
{
"epoch": 1.574851525701413,
"grad_norm": 4.03125,
"learning_rate": 1.1399821774334457e-06,
"loss": 0.5032,
"step": 7690
},
{
"epoch": 1.576899447061233,
"grad_norm": 3.890625,
"learning_rate": 1.1294637134464104e-06,
"loss": 0.4683,
"step": 7700
},
{
"epoch": 1.5789473684210527,
"grad_norm": 4.21875,
"learning_rate": 1.1189878186522684e-06,
"loss": 0.4678,
"step": 7710
},
{
"epoch": 1.5809952897808723,
"grad_norm": 4.40625,
"learning_rate": 1.1085546082677123e-06,
"loss": 0.4456,
"step": 7720
},
{
"epoch": 1.5830432111406922,
"grad_norm": 4.0625,
"learning_rate": 1.0981641970399786e-06,
"loss": 0.4764,
"step": 7730
},
{
"epoch": 1.5850911325005121,
"grad_norm": 4.03125,
"learning_rate": 1.0878166992455874e-06,
"loss": 0.4918,
"step": 7740
},
{
"epoch": 1.5871390538603318,
"grad_norm": 3.65625,
"learning_rate": 1.0775122286890894e-06,
"loss": 0.4753,
"step": 7750
},
{
"epoch": 1.5891869752201515,
"grad_norm": 3.96875,
"learning_rate": 1.0672508987018016e-06,
"loss": 0.4841,
"step": 7760
},
{
"epoch": 1.5912348965799712,
"grad_norm": 4.21875,
"learning_rate": 1.0570328221405796e-06,
"loss": 0.5167,
"step": 7770
},
{
"epoch": 1.593282817939791,
"grad_norm": 3.578125,
"learning_rate": 1.046858111386556e-06,
"loss": 0.5181,
"step": 7780
},
{
"epoch": 1.595330739299611,
"grad_norm": 3.828125,
"learning_rate": 1.0367268783439249e-06,
"loss": 0.4854,
"step": 7790
},
{
"epoch": 1.5973786606594307,
"grad_norm": 3.46875,
"learning_rate": 1.0266392344386939e-06,
"loss": 0.4486,
"step": 7800
},
{
"epoch": 1.5994265820192504,
"grad_norm": 4.5,
"learning_rate": 1.0165952906174675e-06,
"loss": 0.4954,
"step": 7810
},
{
"epoch": 1.6014745033790703,
"grad_norm": 4.03125,
"learning_rate": 1.006595157346225e-06,
"loss": 0.4548,
"step": 7820
},
{
"epoch": 1.6035224247388902,
"grad_norm": 3.875,
"learning_rate": 9.966389446091068e-07,
"loss": 0.5134,
"step": 7830
},
{
"epoch": 1.6055703460987099,
"grad_norm": 3.703125,
"learning_rate": 9.867267619072013e-07,
"loss": 0.4977,
"step": 7840
},
{
"epoch": 1.6076182674585295,
"grad_norm": 4.09375,
"learning_rate": 9.768587182573442e-07,
"loss": 0.4621,
"step": 7850
},
{
"epoch": 1.6096661888183492,
"grad_norm": 3.859375,
"learning_rate": 9.67034922190917e-07,
"loss": 0.5124,
"step": 7860
},
{
"epoch": 1.6117141101781691,
"grad_norm": 4.65625,
"learning_rate": 9.572554817526592e-07,
"loss": 0.5,
"step": 7870
},
{
"epoch": 1.613762031537989,
"grad_norm": 4.15625,
"learning_rate": 9.475205044994651e-07,
"loss": 0.44,
"step": 7880
},
{
"epoch": 1.6158099528978087,
"grad_norm": 3.453125,
"learning_rate": 9.378300974992238e-07,
"loss": 0.5015,
"step": 7890
},
{
"epoch": 1.6178578742576284,
"grad_norm": 3.8125,
"learning_rate": 9.281843673296165e-07,
"loss": 0.486,
"step": 7900
},
{
"epoch": 1.6199057956174483,
"grad_norm": 3.5625,
"learning_rate": 9.185834200769662e-07,
"loss": 0.4988,
"step": 7910
},
{
"epoch": 1.6219537169772682,
"grad_norm": 3.765625,
"learning_rate": 9.090273613350564e-07,
"loss": 0.5192,
"step": 7920
},
{
"epoch": 1.6240016383370879,
"grad_norm": 4.0,
"learning_rate": 8.995162962039761e-07,
"loss": 0.512,
"step": 7930
},
{
"epoch": 1.6260495596969076,
"grad_norm": 4.1875,
"learning_rate": 8.900503292889628e-07,
"loss": 0.5336,
"step": 7940
},
{
"epoch": 1.6280974810567275,
"grad_norm": 3.828125,
"learning_rate": 8.806295646992508e-07,
"loss": 0.4872,
"step": 7950
},
{
"epoch": 1.6301454024165472,
"grad_norm": 3.28125,
"learning_rate": 8.712541060469271e-07,
"loss": 0.5295,
"step": 7960
},
{
"epoch": 1.632193323776367,
"grad_norm": 4.03125,
"learning_rate": 8.61924056445796e-07,
"loss": 0.4899,
"step": 7970
},
{
"epoch": 1.6342412451361867,
"grad_norm": 3.5625,
"learning_rate": 8.526395185102321e-07,
"loss": 0.4615,
"step": 7980
},
{
"epoch": 1.6362891664960064,
"grad_norm": 3.734375,
"learning_rate": 8.434005943540718e-07,
"loss": 0.4868,
"step": 7990
},
{
"epoch": 1.6383370878558263,
"grad_norm": 4.3125,
"learning_rate": 8.342073855894673e-07,
"loss": 0.5378,
"step": 8000
},
{
"epoch": 1.6383370878558263,
"eval_loss": 0.5521250367164612,
"eval_runtime": 50.5238,
"eval_samples_per_second": 81.387,
"eval_steps_per_second": 40.694,
"step": 8000
},
{
"epoch": 1.6403850092156462,
"grad_norm": 4.03125,
"learning_rate": 8.250599933257919e-07,
"loss": 0.4721,
"step": 8010
},
{
"epoch": 1.642432930575466,
"grad_norm": 3.71875,
"learning_rate": 8.15958518168506e-07,
"loss": 0.4715,
"step": 8020
},
{
"epoch": 1.6444808519352856,
"grad_norm": 3.859375,
"learning_rate": 8.069030602180705e-07,
"loss": 0.5022,
"step": 8030
},
{
"epoch": 1.6465287732951055,
"grad_norm": 4.34375,
"learning_rate": 7.978937190688318e-07,
"loss": 0.5166,
"step": 8040
},
{
"epoch": 1.6485766946549254,
"grad_norm": 3.796875,
"learning_rate": 7.889305938079328e-07,
"loss": 0.5324,
"step": 8050
},
{
"epoch": 1.650624616014745,
"grad_norm": 3.078125,
"learning_rate": 7.800137830142212e-07,
"loss": 0.4688,
"step": 8060
},
{
"epoch": 1.6526725373745648,
"grad_norm": 4.03125,
"learning_rate": 7.711433847571664e-07,
"loss": 0.5302,
"step": 8070
},
{
"epoch": 1.6547204587343844,
"grad_norm": 3.8125,
"learning_rate": 7.623194965957786e-07,
"loss": 0.4856,
"step": 8080
},
{
"epoch": 1.6567683800942044,
"grad_norm": 3.984375,
"learning_rate": 7.535422155775423e-07,
"loss": 0.5046,
"step": 8090
},
{
"epoch": 1.6588163014540243,
"grad_norm": 3.921875,
"learning_rate": 7.44811638237336e-07,
"loss": 0.5231,
"step": 8100
},
{
"epoch": 1.660864222813844,
"grad_norm": 3.421875,
"learning_rate": 7.361278605963884e-07,
"loss": 0.5012,
"step": 8110
},
{
"epoch": 1.6629121441736636,
"grad_norm": 3.703125,
"learning_rate": 7.274909781612033e-07,
"loss": 0.4458,
"step": 8120
},
{
"epoch": 1.6649600655334835,
"grad_norm": 4.15625,
"learning_rate": 7.18901085922526e-07,
"loss": 0.504,
"step": 8130
},
{
"epoch": 1.6670079868933034,
"grad_norm": 3.53125,
"learning_rate": 7.103582783542867e-07,
"loss": 0.4965,
"step": 8140
},
{
"epoch": 1.669055908253123,
"grad_norm": 3.96875,
"learning_rate": 7.018626494125674e-07,
"loss": 0.5263,
"step": 8150
},
{
"epoch": 1.6711038296129428,
"grad_norm": 4.53125,
"learning_rate": 6.934142925345661e-07,
"loss": 0.5062,
"step": 8160
},
{
"epoch": 1.6731517509727627,
"grad_norm": 3.078125,
"learning_rate": 6.850133006375704e-07,
"loss": 0.4966,
"step": 8170
},
{
"epoch": 1.6751996723325824,
"grad_norm": 4.21875,
"learning_rate": 6.766597661179352e-07,
"loss": 0.4877,
"step": 8180
},
{
"epoch": 1.6772475936924023,
"grad_norm": 4.1875,
"learning_rate": 6.683537808500673e-07,
"loss": 0.5252,
"step": 8190
},
{
"epoch": 1.679295515052222,
"grad_norm": 3.984375,
"learning_rate": 6.600954361854117e-07,
"loss": 0.5023,
"step": 8200
},
{
"epoch": 1.6813434364120416,
"grad_norm": 4.65625,
"learning_rate": 6.518848229514541e-07,
"loss": 0.5164,
"step": 8210
},
{
"epoch": 1.6833913577718616,
"grad_norm": 3.921875,
"learning_rate": 6.437220314507098e-07,
"loss": 0.4991,
"step": 8220
},
{
"epoch": 1.6854392791316815,
"grad_norm": 3.828125,
"learning_rate": 6.356071514597467e-07,
"loss": 0.4688,
"step": 8230
},
{
"epoch": 1.6874872004915011,
"grad_norm": 4.3125,
"learning_rate": 6.275402722281798e-07,
"loss": 0.5088,
"step": 8240
},
{
"epoch": 1.6895351218513208,
"grad_norm": 3.703125,
"learning_rate": 6.195214824777068e-07,
"loss": 0.496,
"step": 8250
},
{
"epoch": 1.6915830432111407,
"grad_norm": 3.875,
"learning_rate": 6.115508704011208e-07,
"loss": 0.5199,
"step": 8260
},
{
"epoch": 1.6936309645709606,
"grad_norm": 3.296875,
"learning_rate": 6.036285236613437e-07,
"loss": 0.4849,
"step": 8270
},
{
"epoch": 1.6956788859307803,
"grad_norm": 3.453125,
"learning_rate": 5.957545293904632e-07,
"loss": 0.489,
"step": 8280
},
{
"epoch": 1.6977268072906,
"grad_norm": 4.25,
"learning_rate": 5.879289741887739e-07,
"loss": 0.4736,
"step": 8290
},
{
"epoch": 1.6997747286504197,
"grad_norm": 4.65625,
"learning_rate": 5.801519441238229e-07,
"loss": 0.4806,
"step": 8300
},
{
"epoch": 1.7018226500102396,
"grad_norm": 3.359375,
"learning_rate": 5.72423524729469e-07,
"loss": 0.4315,
"step": 8310
},
{
"epoch": 1.7038705713700595,
"grad_norm": 4.03125,
"learning_rate": 5.647438010049305e-07,
"loss": 0.5183,
"step": 8320
},
{
"epoch": 1.7059184927298792,
"grad_norm": 3.328125,
"learning_rate": 5.571128574138667e-07,
"loss": 0.4632,
"step": 8330
},
{
"epoch": 1.7079664140896988,
"grad_norm": 4.0,
"learning_rate": 5.495307778834319e-07,
"loss": 0.5044,
"step": 8340
},
{
"epoch": 1.7100143354495188,
"grad_norm": 3.375,
"learning_rate": 5.419976458033666e-07,
"loss": 0.5054,
"step": 8350
},
{
"epoch": 1.7120622568093387,
"grad_norm": 3.90625,
"learning_rate": 5.345135440250687e-07,
"loss": 0.5582,
"step": 8360
},
{
"epoch": 1.7141101781691583,
"grad_norm": 3.9375,
"learning_rate": 5.270785548606927e-07,
"loss": 0.5177,
"step": 8370
},
{
"epoch": 1.716158099528978,
"grad_norm": 3.921875,
"learning_rate": 5.196927600822366e-07,
"loss": 0.5106,
"step": 8380
},
{
"epoch": 1.718206020888798,
"grad_norm": 3.40625,
"learning_rate": 5.123562409206457e-07,
"loss": 0.4848,
"step": 8390
},
{
"epoch": 1.7202539422486176,
"grad_norm": 3.796875,
"learning_rate": 5.050690780649193e-07,
"loss": 0.5181,
"step": 8400
},
{
"epoch": 1.7223018636084375,
"grad_norm": 4.3125,
"learning_rate": 4.978313516612232e-07,
"loss": 0.4963,
"step": 8410
},
{
"epoch": 1.7243497849682572,
"grad_norm": 3.1875,
"learning_rate": 4.906431413120056e-07,
"loss": 0.5136,
"step": 8420
},
{
"epoch": 1.7263977063280769,
"grad_norm": 4.0,
"learning_rate": 4.835045260751297e-07,
"loss": 0.5049,
"step": 8430
},
{
"epoch": 1.7284456276878968,
"grad_norm": 3.546875,
"learning_rate": 4.7641558446298974e-07,
"loss": 0.5492,
"step": 8440
},
{
"epoch": 1.7304935490477167,
"grad_norm": 3.921875,
"learning_rate": 4.6937639444166514e-07,
"loss": 0.5012,
"step": 8450
},
{
"epoch": 1.7325414704075364,
"grad_norm": 3.359375,
"learning_rate": 4.6238703343004565e-07,
"loss": 0.4954,
"step": 8460
},
{
"epoch": 1.734589391767356,
"grad_norm": 4.1875,
"learning_rate": 4.5544757829899535e-07,
"loss": 0.4714,
"step": 8470
},
{
"epoch": 1.736637313127176,
"grad_norm": 3.890625,
"learning_rate": 4.485581053704957e-07,
"loss": 0.485,
"step": 8480
},
{
"epoch": 1.7386852344869959,
"grad_norm": 3.390625,
"learning_rate": 4.417186904168125e-07,
"loss": 0.4994,
"step": 8490
},
{
"epoch": 1.7407331558468155,
"grad_norm": 3.453125,
"learning_rate": 4.3492940865966006e-07,
"loss": 0.4771,
"step": 8500
},
{
"epoch": 1.7427810772066352,
"grad_norm": 3.578125,
"learning_rate": 4.2819033476937386e-07,
"loss": 0.4589,
"step": 8510
},
{
"epoch": 1.744828998566455,
"grad_norm": 4.15625,
"learning_rate": 4.2150154286409006e-07,
"loss": 0.5249,
"step": 8520
},
{
"epoch": 1.7468769199262748,
"grad_norm": 4.0,
"learning_rate": 4.148631065089315e-07,
"loss": 0.5176,
"step": 8530
},
{
"epoch": 1.7489248412860947,
"grad_norm": 3.84375,
"learning_rate": 4.0827509871519455e-07,
"loss": 0.5037,
"step": 8540
},
{
"epoch": 1.7509727626459144,
"grad_norm": 3.953125,
"learning_rate": 4.0173759193955107e-07,
"loss": 0.5298,
"step": 8550
},
{
"epoch": 1.753020684005734,
"grad_norm": 3.703125,
"learning_rate": 3.952506580832477e-07,
"loss": 0.5212,
"step": 8560
},
{
"epoch": 1.755068605365554,
"grad_norm": 4.15625,
"learning_rate": 3.8881436849131925e-07,
"loss": 0.5017,
"step": 8570
},
{
"epoch": 1.7571165267253739,
"grad_norm": 3.265625,
"learning_rate": 3.8242879395179677e-07,
"loss": 0.5061,
"step": 8580
},
{
"epoch": 1.7591644480851936,
"grad_norm": 3.921875,
"learning_rate": 3.760940046949385e-07,
"loss": 0.4965,
"step": 8590
},
{
"epoch": 1.7612123694450132,
"grad_norm": 3.96875,
"learning_rate": 3.69810070392449e-07,
"loss": 0.5191,
"step": 8600
},
{
"epoch": 1.763260290804833,
"grad_norm": 4.1875,
"learning_rate": 3.63577060156719e-07,
"loss": 0.5086,
"step": 8610
},
{
"epoch": 1.7653082121646528,
"grad_norm": 4.40625,
"learning_rate": 3.573950425400602e-07,
"loss": 0.5109,
"step": 8620
},
{
"epoch": 1.7673561335244727,
"grad_norm": 4.34375,
"learning_rate": 3.512640855339561e-07,
"loss": 0.4675,
"step": 8630
},
{
"epoch": 1.7694040548842924,
"grad_norm": 3.828125,
"learning_rate": 3.4518425656831e-07,
"loss": 0.5042,
"step": 8640
},
{
"epoch": 1.771451976244112,
"grad_norm": 4.09375,
"learning_rate": 3.391556225107073e-07,
"loss": 0.5347,
"step": 8650
},
{
"epoch": 1.773499897603932,
"grad_norm": 4.15625,
"learning_rate": 3.3317824966567713e-07,
"loss": 0.4946,
"step": 8660
},
{
"epoch": 1.775547818963752,
"grad_norm": 3.96875,
"learning_rate": 3.272522037739634e-07,
"loss": 0.4695,
"step": 8670
},
{
"epoch": 1.7775957403235716,
"grad_norm": 4.625,
"learning_rate": 3.213775500118038e-07,
"loss": 0.5263,
"step": 8680
},
{
"epoch": 1.7796436616833913,
"grad_norm": 4.1875,
"learning_rate": 3.155543529902111e-07,
"loss": 0.4979,
"step": 8690
},
{
"epoch": 1.7816915830432112,
"grad_norm": 3.390625,
"learning_rate": 3.0978267675426164e-07,
"loss": 0.4523,
"step": 8700
},
{
"epoch": 1.783739504403031,
"grad_norm": 4.0625,
"learning_rate": 3.040625847823958e-07,
"loss": 0.5158,
"step": 8710
},
{
"epoch": 1.7857874257628508,
"grad_norm": 4.0625,
"learning_rate": 2.983941399857138e-07,
"loss": 0.475,
"step": 8720
},
{
"epoch": 1.7878353471226704,
"grad_norm": 3.515625,
"learning_rate": 2.9277740470728623e-07,
"loss": 0.5373,
"step": 8730
},
{
"epoch": 1.7898832684824901,
"grad_norm": 3.90625,
"learning_rate": 2.8721244072146995e-07,
"loss": 0.4803,
"step": 8740
},
{
"epoch": 1.79193118984231,
"grad_norm": 3.90625,
"learning_rate": 2.816993092332265e-07,
"loss": 0.5106,
"step": 8750
},
{
"epoch": 1.79397911120213,
"grad_norm": 3.828125,
"learning_rate": 2.7623807087745035e-07,
"loss": 0.5295,
"step": 8760
},
{
"epoch": 1.7960270325619496,
"grad_norm": 3.609375,
"learning_rate": 2.708287857183006e-07,
"loss": 0.5699,
"step": 8770
},
{
"epoch": 1.7980749539217693,
"grad_norm": 4.0,
"learning_rate": 2.654715132485414e-07,
"loss": 0.4657,
"step": 8780
},
{
"epoch": 1.8001228752815892,
"grad_norm": 3.796875,
"learning_rate": 2.6016631238888865e-07,
"loss": 0.4737,
"step": 8790
},
{
"epoch": 1.8021707966414091,
"grad_norm": 3.796875,
"learning_rate": 2.549132414873584e-07,
"loss": 0.5056,
"step": 8800
},
{
"epoch": 1.8042187180012288,
"grad_norm": 3.015625,
"learning_rate": 2.497123583186295e-07,
"loss": 0.5065,
"step": 8810
},
{
"epoch": 1.8062666393610485,
"grad_norm": 3.6875,
"learning_rate": 2.4456372008340724e-07,
"loss": 0.4631,
"step": 8820
},
{
"epoch": 1.8083145607208682,
"grad_norm": 3.859375,
"learning_rate": 2.394673834077882e-07,
"loss": 0.5392,
"step": 8830
},
{
"epoch": 1.810362482080688,
"grad_norm": 3.78125,
"learning_rate": 2.3442340434264798e-07,
"loss": 0.4679,
"step": 8840
},
{
"epoch": 1.812410403440508,
"grad_norm": 4.5625,
"learning_rate": 2.2943183836301676e-07,
"loss": 0.518,
"step": 8850
},
{
"epoch": 1.8144583248003276,
"grad_norm": 4.0625,
"learning_rate": 2.2449274036747072e-07,
"loss": 0.5188,
"step": 8860
},
{
"epoch": 1.8165062461601473,
"grad_norm": 4.0,
"learning_rate": 2.1960616467753104e-07,
"loss": 0.4933,
"step": 8870
},
{
"epoch": 1.8185541675199672,
"grad_norm": 3.84375,
"learning_rate": 2.1477216503706267e-07,
"loss": 0.5042,
"step": 8880
},
{
"epoch": 1.8206020888797871,
"grad_norm": 3.8125,
"learning_rate": 2.0999079461168692e-07,
"loss": 0.4993,
"step": 8890
},
{
"epoch": 1.8226500102396068,
"grad_norm": 4.15625,
"learning_rate": 2.0526210598819373e-07,
"loss": 0.5079,
"step": 8900
},
{
"epoch": 1.8246979315994265,
"grad_norm": 3.578125,
"learning_rate": 2.0058615117396486e-07,
"loss": 0.49,
"step": 8910
},
{
"epoch": 1.8267458529592464,
"grad_norm": 4.65625,
"learning_rate": 1.9596298159640149e-07,
"loss": 0.5116,
"step": 8920
},
{
"epoch": 1.8287937743190663,
"grad_norm": 3.796875,
"learning_rate": 1.9139264810235757e-07,
"loss": 0.5092,
"step": 8930
},
{
"epoch": 1.830841695678886,
"grad_norm": 3.859375,
"learning_rate": 1.8687520095758583e-07,
"loss": 0.5138,
"step": 8940
},
{
"epoch": 1.8328896170387057,
"grad_norm": 3.828125,
"learning_rate": 1.8241068984617415e-07,
"loss": 0.5163,
"step": 8950
},
{
"epoch": 1.8349375383985254,
"grad_norm": 3.921875,
"learning_rate": 1.7799916387001183e-07,
"loss": 0.4741,
"step": 8960
},
{
"epoch": 1.8369854597583453,
"grad_norm": 3.484375,
"learning_rate": 1.736406715482397e-07,
"loss": 0.4613,
"step": 8970
},
{
"epoch": 1.8390333811181652,
"grad_norm": 4.09375,
"learning_rate": 1.6933526081672202e-07,
"loss": 0.5445,
"step": 8980
},
{
"epoch": 1.8410813024779848,
"grad_norm": 4.28125,
"learning_rate": 1.650829790275177e-07,
"loss": 0.4681,
"step": 8990
},
{
"epoch": 1.8431292238378045,
"grad_norm": 4.65625,
"learning_rate": 1.608838729483575e-07,
"loss": 0.5008,
"step": 9000
},
{
"epoch": 1.8431292238378045,
"eval_loss": 0.5520676374435425,
"eval_runtime": 50.2055,
"eval_samples_per_second": 81.903,
"eval_steps_per_second": 40.952,
"step": 9000
},
{
"epoch": 1.8451771451976244,
"grad_norm": 4.375,
"learning_rate": 1.5673798876213398e-07,
"loss": 0.4875,
"step": 9010
},
{
"epoch": 1.8472250665574443,
"grad_norm": 4.25,
"learning_rate": 1.5264537206638896e-07,
"loss": 0.4904,
"step": 9020
},
{
"epoch": 1.849272987917264,
"grad_norm": 3.234375,
"learning_rate": 1.4860606787281518e-07,
"loss": 0.5164,
"step": 9030
},
{
"epoch": 1.8513209092770837,
"grad_norm": 4.34375,
"learning_rate": 1.4462012060676045e-07,
"loss": 0.4678,
"step": 9040
},
{
"epoch": 1.8533688306369034,
"grad_norm": 3.796875,
"learning_rate": 1.4068757410673762e-07,
"loss": 0.4999,
"step": 9050
},
{
"epoch": 1.8554167519967233,
"grad_norm": 3.375,
"learning_rate": 1.368084716239465e-07,
"loss": 0.4937,
"step": 9060
},
{
"epoch": 1.8574646733565432,
"grad_norm": 4.0625,
"learning_rate": 1.3298285582179104e-07,
"loss": 0.5074,
"step": 9070
},
{
"epoch": 1.8595125947163629,
"grad_norm": 3.921875,
"learning_rate": 1.2921076877541794e-07,
"loss": 0.4735,
"step": 9080
},
{
"epoch": 1.8615605160761826,
"grad_norm": 4.75,
"learning_rate": 1.2549225197124814e-07,
"loss": 0.4848,
"step": 9090
},
{
"epoch": 1.8636084374360025,
"grad_norm": 3.71875,
"learning_rate": 1.2182734630652448e-07,
"loss": 0.4821,
"step": 9100
},
{
"epoch": 1.8656563587958224,
"grad_norm": 3.484375,
"learning_rate": 1.1821609208885865e-07,
"loss": 0.4889,
"step": 9110
},
{
"epoch": 1.867704280155642,
"grad_norm": 4.4375,
"learning_rate": 1.14658529035791e-07,
"loss": 0.4905,
"step": 9120
},
{
"epoch": 1.8697522015154617,
"grad_norm": 3.53125,
"learning_rate": 1.1115469627434983e-07,
"loss": 0.4923,
"step": 9130
},
{
"epoch": 1.8718001228752816,
"grad_norm": 3.1875,
"learning_rate": 1.0770463234062556e-07,
"loss": 0.4689,
"step": 9140
},
{
"epoch": 1.8738480442351013,
"grad_norm": 4.03125,
"learning_rate": 1.0430837517934278e-07,
"loss": 0.4526,
"step": 9150
},
{
"epoch": 1.8758959655949212,
"grad_norm": 3.8125,
"learning_rate": 1.0096596214344723e-07,
"loss": 0.5039,
"step": 9160
},
{
"epoch": 1.877943886954741,
"grad_norm": 3.796875,
"learning_rate": 9.767742999368945e-08,
"loss": 0.5123,
"step": 9170
},
{
"epoch": 1.8799918083145606,
"grad_norm": 3.9375,
"learning_rate": 9.444281489822737e-08,
"loss": 0.5069,
"step": 9180
},
{
"epoch": 1.8820397296743805,
"grad_norm": 3.375,
"learning_rate": 9.126215243221992e-08,
"loss": 0.5134,
"step": 9190
},
{
"epoch": 1.8840876510342004,
"grad_norm": 3.96875,
"learning_rate": 8.813547757744568e-08,
"loss": 0.5314,
"step": 9200
},
{
"epoch": 1.88613557239402,
"grad_norm": 3.71875,
"learning_rate": 8.506282472190819e-08,
"loss": 0.5024,
"step": 9210
},
{
"epoch": 1.8881834937538398,
"grad_norm": 3.90625,
"learning_rate": 8.204422765946741e-08,
"loss": 0.4582,
"step": 9220
},
{
"epoch": 1.8902314151136597,
"grad_norm": 3.578125,
"learning_rate": 7.907971958945992e-08,
"loss": 0.5259,
"step": 9230
},
{
"epoch": 1.8922793364734796,
"grad_norm": 4.5625,
"learning_rate": 7.616933311633823e-08,
"loss": 0.4899,
"step": 9240
},
{
"epoch": 1.8943272578332992,
"grad_norm": 4.40625,
"learning_rate": 7.331310024931038e-08,
"loss": 0.4689,
"step": 9250
},
{
"epoch": 1.896375179193119,
"grad_norm": 4.625,
"learning_rate": 7.051105240198975e-08,
"loss": 0.5188,
"step": 9260
},
{
"epoch": 1.8984231005529386,
"grad_norm": 3.796875,
"learning_rate": 6.776322039204642e-08,
"loss": 0.4636,
"step": 9270
},
{
"epoch": 1.9004710219127585,
"grad_norm": 3.984375,
"learning_rate": 6.506963444087355e-08,
"loss": 0.4749,
"step": 9280
},
{
"epoch": 1.9025189432725784,
"grad_norm": 4.34375,
"learning_rate": 6.243032417324768e-08,
"loss": 0.549,
"step": 9290
},
{
"epoch": 1.904566864632398,
"grad_norm": 3.953125,
"learning_rate": 5.984531861701004e-08,
"loss": 0.4987,
"step": 9300
},
{
"epoch": 1.9066147859922178,
"grad_norm": 4.0,
"learning_rate": 5.7314646202742405e-08,
"loss": 0.5011,
"step": 9310
},
{
"epoch": 1.9086627073520377,
"grad_norm": 3.671875,
"learning_rate": 5.483833476345624e-08,
"loss": 0.5187,
"step": 9320
},
{
"epoch": 1.9107106287118576,
"grad_norm": 4.0625,
"learning_rate": 5.241641153428734e-08,
"loss": 0.5182,
"step": 9330
},
{
"epoch": 1.9127585500716773,
"grad_norm": 4.09375,
"learning_rate": 5.004890315219446e-08,
"loss": 0.5202,
"step": 9340
},
{
"epoch": 1.914806471431497,
"grad_norm": 3.53125,
"learning_rate": 4.7735835655667864e-08,
"loss": 0.5006,
"step": 9350
},
{
"epoch": 1.9168543927913169,
"grad_norm": 4.5625,
"learning_rate": 4.547723448444286e-08,
"loss": 0.4896,
"step": 9360
},
{
"epoch": 1.9189023141511365,
"grad_norm": 4.5,
"learning_rate": 4.3273124479218386e-08,
"loss": 0.5277,
"step": 9370
},
{
"epoch": 1.9209502355109564,
"grad_norm": 4.4375,
"learning_rate": 4.112352988138557e-08,
"loss": 0.4983,
"step": 9380
},
{
"epoch": 1.9229981568707761,
"grad_norm": 4.40625,
"learning_rate": 3.902847433276014e-08,
"loss": 0.5211,
"step": 9390
},
{
"epoch": 1.9250460782305958,
"grad_norm": 3.6875,
"learning_rate": 3.698798087532485e-08,
"loss": 0.4465,
"step": 9400
},
{
"epoch": 1.9270939995904157,
"grad_norm": 3.09375,
"learning_rate": 3.500207195096972e-08,
"loss": 0.4819,
"step": 9410
},
{
"epoch": 1.9291419209502356,
"grad_norm": 3.875,
"learning_rate": 3.3070769401254424e-08,
"loss": 0.5247,
"step": 9420
},
{
"epoch": 1.9311898423100553,
"grad_norm": 4.125,
"learning_rate": 3.1194094467159044e-08,
"loss": 0.5271,
"step": 9430
},
{
"epoch": 1.933237763669875,
"grad_norm": 3.546875,
"learning_rate": 2.9372067788857594e-08,
"loss": 0.4789,
"step": 9440
},
{
"epoch": 1.935285685029695,
"grad_norm": 4.4375,
"learning_rate": 2.760470940548543e-08,
"loss": 0.4851,
"step": 9450
},
{
"epoch": 1.9373336063895148,
"grad_norm": 4.8125,
"learning_rate": 2.58920387549233e-08,
"loss": 0.4704,
"step": 9460
},
{
"epoch": 1.9393815277493345,
"grad_norm": 3.328125,
"learning_rate": 2.4234074673580853e-08,
"loss": 0.5505,
"step": 9470
},
{
"epoch": 1.9414294491091542,
"grad_norm": 4.125,
"learning_rate": 2.2630835396190155e-08,
"loss": 0.5198,
"step": 9480
},
{
"epoch": 1.9434773704689738,
"grad_norm": 3.203125,
"learning_rate": 2.1082338555605265e-08,
"loss": 0.4826,
"step": 9490
},
{
"epoch": 1.9455252918287937,
"grad_norm": 3.578125,
"learning_rate": 1.9588601182608524e-08,
"loss": 0.5109,
"step": 9500
},
{
"epoch": 1.9475732131886137,
"grad_norm": 3.6875,
"learning_rate": 1.814963970572292e-08,
"loss": 0.5035,
"step": 9510
},
{
"epoch": 1.9496211345484333,
"grad_norm": 3.859375,
"learning_rate": 1.676546995103223e-08,
"loss": 0.513,
"step": 9520
},
{
"epoch": 1.951669055908253,
"grad_norm": 3.734375,
"learning_rate": 1.5436107142003943e-08,
"loss": 0.5197,
"step": 9530
},
{
"epoch": 1.953716977268073,
"grad_norm": 3.203125,
"learning_rate": 1.4161565899327157e-08,
"loss": 0.4731,
"step": 9540
},
{
"epoch": 1.9557648986278928,
"grad_norm": 3.953125,
"learning_rate": 1.2941860240746617e-08,
"loss": 0.4463,
"step": 9550
},
{
"epoch": 1.9578128199877125,
"grad_norm": 3.640625,
"learning_rate": 1.1777003580911161e-08,
"loss": 0.5008,
"step": 9560
},
{
"epoch": 1.9598607413475322,
"grad_norm": 3.71875,
"learning_rate": 1.0667008731225504e-08,
"loss": 0.4966,
"step": 9570
},
{
"epoch": 1.961908662707352,
"grad_norm": 3.953125,
"learning_rate": 9.611887899710349e-09,
"loss": 0.5175,
"step": 9580
},
{
"epoch": 1.9639565840671718,
"grad_norm": 3.71875,
"learning_rate": 8.61165269086639e-09,
"loss": 0.479,
"step": 9590
},
{
"epoch": 1.9660045054269917,
"grad_norm": 3.765625,
"learning_rate": 7.666314105547745e-09,
"loss": 0.5428,
"step": 9600
},
{
"epoch": 1.9680524267868114,
"grad_norm": 4.3125,
"learning_rate": 6.775882540841494e-09,
"loss": 0.4741,
"step": 9610
},
{
"epoch": 1.970100348146631,
"grad_norm": 4.03125,
"learning_rate": 5.940367789951107e-09,
"loss": 0.4811,
"step": 9620
},
{
"epoch": 1.972148269506451,
"grad_norm": 3.609375,
"learning_rate": 5.159779042092083e-09,
"loss": 0.54,
"step": 9630
},
{
"epoch": 1.9741961908662709,
"grad_norm": 4.0,
"learning_rate": 4.434124882388146e-09,
"loss": 0.4908,
"step": 9640
},
{
"epoch": 1.9762441122260905,
"grad_norm": 3.546875,
"learning_rate": 3.763413291776874e-09,
"loss": 0.5134,
"step": 9650
},
{
"epoch": 1.9782920335859102,
"grad_norm": 4.09375,
"learning_rate": 3.1476516469247655e-09,
"loss": 0.4972,
"step": 9660
},
{
"epoch": 1.9803399549457301,
"grad_norm": 3.828125,
"learning_rate": 2.586846720141756e-09,
"loss": 0.5337,
"step": 9670
},
{
"epoch": 1.98238787630555,
"grad_norm": 3.625,
"learning_rate": 2.0810046793118266e-09,
"loss": 0.5048,
"step": 9680
},
{
"epoch": 1.9844357976653697,
"grad_norm": 3.515625,
"learning_rate": 1.6301310878197307e-09,
"loss": 0.472,
"step": 9690
},
{
"epoch": 1.9864837190251894,
"grad_norm": 3.921875,
"learning_rate": 1.2342309044943712e-09,
"loss": 0.4877,
"step": 9700
},
{
"epoch": 1.988531640385009,
"grad_norm": 3.46875,
"learning_rate": 8.933084835521799e-10,
"loss": 0.4863,
"step": 9710
},
{
"epoch": 1.990579561744829,
"grad_norm": 4.03125,
"learning_rate": 6.073675745482677e-10,
"loss": 0.4992,
"step": 9720
},
{
"epoch": 1.9926274831046489,
"grad_norm": 3.53125,
"learning_rate": 3.764113223375665e-10,
"loss": 0.5157,
"step": 9730
},
{
"epoch": 1.9946754044644686,
"grad_norm": 4.3125,
"learning_rate": 2.0044226703819225e-10,
"loss": 0.4896,
"step": 9740
},
{
"epoch": 1.9967233258242882,
"grad_norm": 3.234375,
"learning_rate": 7.946234400590947e-11,
"loss": 0.4628,
"step": 9750
},
{
"epoch": 1.9987712471841081,
"grad_norm": 4.125,
"learning_rate": 1.3472883809706461e-11,
"loss": 0.4633,
"step": 9760
}
],
"logging_steps": 10,
"max_steps": 9766,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6.580184799992873e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}