{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9995859213250518,
"eval_steps": 500,
"global_step": 1207,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008281573498964803,
"grad_norm": 23.327525151011056,
"learning_rate": 8.264462809917357e-08,
"loss": 1.4005,
"step": 1
},
{
"epoch": 0.004140786749482402,
"grad_norm": 23.258943369441326,
"learning_rate": 4.132231404958678e-07,
"loss": 1.4149,
"step": 5
},
{
"epoch": 0.008281573498964804,
"grad_norm": 8.596486068396079,
"learning_rate": 8.264462809917356e-07,
"loss": 1.3035,
"step": 10
},
{
"epoch": 0.012422360248447204,
"grad_norm": 10.471425792727166,
"learning_rate": 1.2396694214876035e-06,
"loss": 1.1551,
"step": 15
},
{
"epoch": 0.016563146997929608,
"grad_norm": 2.949608539181658,
"learning_rate": 1.6528925619834712e-06,
"loss": 1.01,
"step": 20
},
{
"epoch": 0.020703933747412008,
"grad_norm": 2.8135801377147867,
"learning_rate": 2.066115702479339e-06,
"loss": 0.9591,
"step": 25
},
{
"epoch": 0.024844720496894408,
"grad_norm": 2.5182522183092853,
"learning_rate": 2.479338842975207e-06,
"loss": 0.9186,
"step": 30
},
{
"epoch": 0.028985507246376812,
"grad_norm": 2.2568295476930262,
"learning_rate": 2.8925619834710743e-06,
"loss": 0.9096,
"step": 35
},
{
"epoch": 0.033126293995859216,
"grad_norm": 2.5218798187172196,
"learning_rate": 3.3057851239669424e-06,
"loss": 0.8779,
"step": 40
},
{
"epoch": 0.037267080745341616,
"grad_norm": 2.527152447671948,
"learning_rate": 3.71900826446281e-06,
"loss": 0.8818,
"step": 45
},
{
"epoch": 0.041407867494824016,
"grad_norm": 2.3809263568393617,
"learning_rate": 4.132231404958678e-06,
"loss": 0.8688,
"step": 50
},
{
"epoch": 0.045548654244306416,
"grad_norm": 2.511225291546822,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.8611,
"step": 55
},
{
"epoch": 0.049689440993788817,
"grad_norm": 2.3182185684474317,
"learning_rate": 4.958677685950414e-06,
"loss": 0.8508,
"step": 60
},
{
"epoch": 0.053830227743271224,
"grad_norm": 2.3756730633774494,
"learning_rate": 5.371900826446281e-06,
"loss": 0.855,
"step": 65
},
{
"epoch": 0.057971014492753624,
"grad_norm": 2.501349708861314,
"learning_rate": 5.785123966942149e-06,
"loss": 0.8408,
"step": 70
},
{
"epoch": 0.062111801242236024,
"grad_norm": 2.7274716294485617,
"learning_rate": 6.198347107438017e-06,
"loss": 0.8311,
"step": 75
},
{
"epoch": 0.06625258799171843,
"grad_norm": 2.473386291311923,
"learning_rate": 6.611570247933885e-06,
"loss": 0.8383,
"step": 80
},
{
"epoch": 0.07039337474120083,
"grad_norm": 2.366326788697712,
"learning_rate": 7.0247933884297525e-06,
"loss": 0.8309,
"step": 85
},
{
"epoch": 0.07453416149068323,
"grad_norm": 2.5315630281263335,
"learning_rate": 7.43801652892562e-06,
"loss": 0.8209,
"step": 90
},
{
"epoch": 0.07867494824016563,
"grad_norm": 2.589584428395013,
"learning_rate": 7.851239669421489e-06,
"loss": 0.8161,
"step": 95
},
{
"epoch": 0.08281573498964803,
"grad_norm": 2.9536546546217264,
"learning_rate": 8.264462809917356e-06,
"loss": 0.819,
"step": 100
},
{
"epoch": 0.08695652173913043,
"grad_norm": 2.44404519233414,
"learning_rate": 8.677685950413224e-06,
"loss": 0.8124,
"step": 105
},
{
"epoch": 0.09109730848861283,
"grad_norm": 2.487953243973851,
"learning_rate": 9.090909090909091e-06,
"loss": 0.8114,
"step": 110
},
{
"epoch": 0.09523809523809523,
"grad_norm": 2.6020731071259124,
"learning_rate": 9.50413223140496e-06,
"loss": 0.7995,
"step": 115
},
{
"epoch": 0.09937888198757763,
"grad_norm": 2.3960060410854966,
"learning_rate": 9.917355371900828e-06,
"loss": 0.795,
"step": 120
},
{
"epoch": 0.10351966873706005,
"grad_norm": 2.4423780661726817,
"learning_rate": 9.999665269535307e-06,
"loss": 0.7988,
"step": 125
},
{
"epoch": 0.10766045548654245,
"grad_norm": 2.570173600121692,
"learning_rate": 9.998305503833872e-06,
"loss": 0.7761,
"step": 130
},
{
"epoch": 0.11180124223602485,
"grad_norm": 2.3986936359278,
"learning_rate": 9.995900066492902e-06,
"loss": 0.7839,
"step": 135
},
{
"epoch": 0.11594202898550725,
"grad_norm": 2.487706091670238,
"learning_rate": 9.992449460742464e-06,
"loss": 0.7864,
"step": 140
},
{
"epoch": 0.12008281573498965,
"grad_norm": 2.2343649680910596,
"learning_rate": 9.98795440846732e-06,
"loss": 0.7654,
"step": 145
},
{
"epoch": 0.12422360248447205,
"grad_norm": 2.4156685822084785,
"learning_rate": 9.982415850055902e-06,
"loss": 0.7686,
"step": 150
},
{
"epoch": 0.12836438923395446,
"grad_norm": 2.5259231738748356,
"learning_rate": 9.975834944203581e-06,
"loss": 0.7751,
"step": 155
},
{
"epoch": 0.13250517598343686,
"grad_norm": 2.3772530672985326,
"learning_rate": 9.968213067670265e-06,
"loss": 0.7445,
"step": 160
},
{
"epoch": 0.13664596273291926,
"grad_norm": 2.2859422135364107,
"learning_rate": 9.959551814992364e-06,
"loss": 0.7547,
"step": 165
},
{
"epoch": 0.14078674948240166,
"grad_norm": 2.5122248077169984,
"learning_rate": 9.949852998149217e-06,
"loss": 0.7585,
"step": 170
},
{
"epoch": 0.14492753623188406,
"grad_norm": 2.4905596534144268,
"learning_rate": 9.939118646184007e-06,
"loss": 0.7467,
"step": 175
},
{
"epoch": 0.14906832298136646,
"grad_norm": 2.486536604984542,
"learning_rate": 9.927351004779275e-06,
"loss": 0.7388,
"step": 180
},
{
"epoch": 0.15320910973084886,
"grad_norm": 2.9785597879261174,
"learning_rate": 9.914552535787122e-06,
"loss": 0.7302,
"step": 185
},
{
"epoch": 0.15734989648033126,
"grad_norm": 2.3058286602799427,
"learning_rate": 9.900725916714157e-06,
"loss": 0.7314,
"step": 190
},
{
"epoch": 0.16149068322981366,
"grad_norm": 2.5472246179561737,
"learning_rate": 9.885874040161373e-06,
"loss": 0.7286,
"step": 195
},
{
"epoch": 0.16563146997929606,
"grad_norm": 2.279293729274078,
"learning_rate": 9.87000001321898e-06,
"loss": 0.7183,
"step": 200
},
{
"epoch": 0.16977225672877846,
"grad_norm": 2.3921354483270445,
"learning_rate": 9.853107156816393e-06,
"loss": 0.7134,
"step": 205
},
{
"epoch": 0.17391304347826086,
"grad_norm": 2.3525551445639024,
"learning_rate": 9.835199005027477e-06,
"loss": 0.7221,
"step": 210
},
{
"epoch": 0.17805383022774326,
"grad_norm": 2.3800199876143884,
"learning_rate": 9.816279304331202e-06,
"loss": 0.7183,
"step": 215
},
{
"epoch": 0.18219461697722567,
"grad_norm": 2.229750773842239,
"learning_rate": 9.79635201282785e-06,
"loss": 0.7194,
"step": 220
},
{
"epoch": 0.18633540372670807,
"grad_norm": 2.299300141984987,
"learning_rate": 9.775421299410977e-06,
"loss": 0.7068,
"step": 225
},
{
"epoch": 0.19047619047619047,
"grad_norm": 2.4470455330242022,
"learning_rate": 9.753491542895237e-06,
"loss": 0.6965,
"step": 230
},
{
"epoch": 0.19461697722567287,
"grad_norm": 2.4108850288342416,
"learning_rate": 9.730567331100333e-06,
"loss": 0.6961,
"step": 235
},
{
"epoch": 0.19875776397515527,
"grad_norm": 2.2444903738856166,
"learning_rate": 9.706653459891207e-06,
"loss": 0.695,
"step": 240
},
{
"epoch": 0.2028985507246377,
"grad_norm": 2.3716649705516613,
"learning_rate": 9.681754932174719e-06,
"loss": 0.6851,
"step": 245
},
{
"epoch": 0.2070393374741201,
"grad_norm": 2.3143450163448374,
"learning_rate": 9.655876956853025e-06,
"loss": 0.6876,
"step": 250
},
{
"epoch": 0.2111801242236025,
"grad_norm": 2.409260444364719,
"learning_rate": 9.629024947733836e-06,
"loss": 0.6788,
"step": 255
},
{
"epoch": 0.2153209109730849,
"grad_norm": 2.2610337983169657,
"learning_rate": 9.601204522397826e-06,
"loss": 0.6754,
"step": 260
},
{
"epoch": 0.2194616977225673,
"grad_norm": 2.340354580101845,
"learning_rate": 9.572421501023403e-06,
"loss": 0.6875,
"step": 265
},
{
"epoch": 0.2236024844720497,
"grad_norm": 2.367203272779439,
"learning_rate": 9.5426819051691e-06,
"loss": 0.6698,
"step": 270
},
{
"epoch": 0.2277432712215321,
"grad_norm": 2.4354443343778356,
"learning_rate": 9.511991956513828e-06,
"loss": 0.6737,
"step": 275
},
{
"epoch": 0.2318840579710145,
"grad_norm": 2.3653987155446528,
"learning_rate": 9.480358075555278e-06,
"loss": 0.6534,
"step": 280
},
{
"epoch": 0.2360248447204969,
"grad_norm": 2.463110193099795,
"learning_rate": 9.447786880266706e-06,
"loss": 0.6793,
"step": 285
},
{
"epoch": 0.2401656314699793,
"grad_norm": 2.356291601187774,
"learning_rate": 9.414285184712432e-06,
"loss": 0.6482,
"step": 290
},
{
"epoch": 0.2443064182194617,
"grad_norm": 2.448482745822932,
"learning_rate": 9.37985999762229e-06,
"loss": 0.6537,
"step": 295
},
{
"epoch": 0.2484472049689441,
"grad_norm": 2.167053624005552,
"learning_rate": 9.344518520925377e-06,
"loss": 0.6395,
"step": 300
},
{
"epoch": 0.2525879917184265,
"grad_norm": 2.224339535064626,
"learning_rate": 9.308268148243355e-06,
"loss": 0.6293,
"step": 305
},
{
"epoch": 0.2567287784679089,
"grad_norm": 2.214031822614262,
"learning_rate": 9.271116463343692e-06,
"loss": 0.6374,
"step": 310
},
{
"epoch": 0.2608695652173913,
"grad_norm": 2.8051942047743235,
"learning_rate": 9.23307123855307e-06,
"loss": 0.6313,
"step": 315
},
{
"epoch": 0.2650103519668737,
"grad_norm": 2.363326927922635,
"learning_rate": 9.194140433131397e-06,
"loss": 0.6324,
"step": 320
},
{
"epoch": 0.2691511387163561,
"grad_norm": 2.5833993381945124,
"learning_rate": 9.154332191606671e-06,
"loss": 0.6096,
"step": 325
},
{
"epoch": 0.2732919254658385,
"grad_norm": 2.5023763345607244,
"learning_rate": 9.113654842071114e-06,
"loss": 0.6331,
"step": 330
},
{
"epoch": 0.2774327122153209,
"grad_norm": 2.2756476411928954,
"learning_rate": 9.072116894438885e-06,
"loss": 0.6102,
"step": 335
},
{
"epoch": 0.2815734989648033,
"grad_norm": 2.2883976811600886,
"learning_rate": 9.029727038665765e-06,
"loss": 0.6196,
"step": 340
},
{
"epoch": 0.2857142857142857,
"grad_norm": 2.4045467670456024,
"learning_rate": 8.986494142931168e-06,
"loss": 0.6053,
"step": 345
},
{
"epoch": 0.2898550724637681,
"grad_norm": 2.411121438317645,
"learning_rate": 8.94242725178288e-06,
"loss": 0.6191,
"step": 350
},
{
"epoch": 0.2939958592132505,
"grad_norm": 2.2437917822304505,
"learning_rate": 8.89753558424488e-06,
"loss": 0.5877,
"step": 355
},
{
"epoch": 0.2981366459627329,
"grad_norm": 2.594060420622693,
"learning_rate": 8.851828531888692e-06,
"loss": 0.6023,
"step": 360
},
{
"epoch": 0.3022774327122153,
"grad_norm": 2.671644595885512,
"learning_rate": 8.805315656868587e-06,
"loss": 0.6078,
"step": 365
},
{
"epoch": 0.3064182194616977,
"grad_norm": 2.2680229592309695,
"learning_rate": 8.75800668992117e-06,
"loss": 0.59,
"step": 370
},
{
"epoch": 0.3105590062111801,
"grad_norm": 2.197781067793621,
"learning_rate": 8.709911528329623e-06,
"loss": 0.5923,
"step": 375
},
{
"epoch": 0.3146997929606625,
"grad_norm": 2.0913374258858495,
"learning_rate": 8.661040233853166e-06,
"loss": 0.589,
"step": 380
},
{
"epoch": 0.3188405797101449,
"grad_norm": 2.2014541423571043,
"learning_rate": 8.611403030622074e-06,
"loss": 0.5822,
"step": 385
},
{
"epoch": 0.32298136645962733,
"grad_norm": 2.261196085234065,
"learning_rate": 8.561010302998734e-06,
"loss": 0.564,
"step": 390
},
{
"epoch": 0.32712215320910976,
"grad_norm": 2.23730484550207,
"learning_rate": 8.509872593405189e-06,
"loss": 0.5645,
"step": 395
},
{
"epoch": 0.33126293995859213,
"grad_norm": 2.2692143055291063,
"learning_rate": 8.458000600117604e-06,
"loss": 0.5776,
"step": 400
},
{
"epoch": 0.33540372670807456,
"grad_norm": 2.3234912814206226,
"learning_rate": 8.40540517502813e-06,
"loss": 0.5827,
"step": 405
},
{
"epoch": 0.33954451345755693,
"grad_norm": 2.6596848933224146,
"learning_rate": 8.35209732137463e-06,
"loss": 0.5669,
"step": 410
},
{
"epoch": 0.34368530020703936,
"grad_norm": 2.170663333128378,
"learning_rate": 8.298088191438753e-06,
"loss": 0.5707,
"step": 415
},
{
"epoch": 0.34782608695652173,
"grad_norm": 2.3379705431469815,
"learning_rate": 8.243389084212808e-06,
"loss": 0.5633,
"step": 420
},
{
"epoch": 0.35196687370600416,
"grad_norm": 2.2304485025883194,
"learning_rate": 8.188011443035962e-06,
"loss": 0.5528,
"step": 425
},
{
"epoch": 0.35610766045548653,
"grad_norm": 2.190979894629082,
"learning_rate": 8.131966853200226e-06,
"loss": 0.56,
"step": 430
},
{
"epoch": 0.36024844720496896,
"grad_norm": 2.4413883194284915,
"learning_rate": 8.075267039526764e-06,
"loss": 0.5563,
"step": 435
},
{
"epoch": 0.36438923395445133,
"grad_norm": 2.14891058793579,
"learning_rate": 8.017923863912989e-06,
"loss": 0.5516,
"step": 440
},
{
"epoch": 0.36853002070393376,
"grad_norm": 2.193120721619817,
"learning_rate": 7.959949322850994e-06,
"loss": 0.5504,
"step": 445
},
{
"epoch": 0.37267080745341613,
"grad_norm": 2.2245149668941595,
"learning_rate": 7.901355544917827e-06,
"loss": 0.5464,
"step": 450
},
{
"epoch": 0.37681159420289856,
"grad_norm": 2.3390482615587356,
"learning_rate": 7.842154788238124e-06,
"loss": 0.5464,
"step": 455
},
{
"epoch": 0.38095238095238093,
"grad_norm": 2.106177697179644,
"learning_rate": 7.782359437919644e-06,
"loss": 0.5399,
"step": 460
},
{
"epoch": 0.38509316770186336,
"grad_norm": 2.122296342328107,
"learning_rate": 7.721982003462255e-06,
"loss": 0.5189,
"step": 465
},
{
"epoch": 0.38923395445134573,
"grad_norm": 2.2344930887284944,
"learning_rate": 7.661035116140856e-06,
"loss": 0.5317,
"step": 470
},
{
"epoch": 0.39337474120082816,
"grad_norm": 3.733462702610377,
"learning_rate": 7.599531526362873e-06,
"loss": 0.5362,
"step": 475
},
{
"epoch": 0.39751552795031053,
"grad_norm": 2.2217276329657607,
"learning_rate": 7.537484101000787e-06,
"loss": 0.5145,
"step": 480
},
{
"epoch": 0.40165631469979296,
"grad_norm": 2.317355987149082,
"learning_rate": 7.474905820700334e-06,
"loss": 0.5235,
"step": 485
},
{
"epoch": 0.4057971014492754,
"grad_norm": 2.2831077300833007,
"learning_rate": 7.411809777164873e-06,
"loss": 0.5194,
"step": 490
},
{
"epoch": 0.40993788819875776,
"grad_norm": 2.1848755725332873,
"learning_rate": 7.3482091704165405e-06,
"loss": 0.5177,
"step": 495
},
{
"epoch": 0.4140786749482402,
"grad_norm": 2.156631269209744,
"learning_rate": 7.284117306034733e-06,
"loss": 0.5202,
"step": 500
},
{
"epoch": 0.41821946169772256,
"grad_norm": 2.1755447650720283,
"learning_rate": 7.219547592372512e-06,
"loss": 0.5128,
"step": 505
},
{
"epoch": 0.422360248447205,
"grad_norm": 2.08598669974358,
"learning_rate": 7.15451353775151e-06,
"loss": 0.5172,
"step": 510
},
{
"epoch": 0.42650103519668736,
"grad_norm": 2.16833234073926,
"learning_rate": 7.089028747635908e-06,
"loss": 0.5126,
"step": 515
},
{
"epoch": 0.4306418219461698,
"grad_norm": 2.1117513557765486,
"learning_rate": 7.023106921786118e-06,
"loss": 0.5084,
"step": 520
},
{
"epoch": 0.43478260869565216,
"grad_norm": 2.1996894090923096,
"learning_rate": 6.956761851392706e-06,
"loss": 0.5066,
"step": 525
},
{
"epoch": 0.4389233954451346,
"grad_norm": 2.19899240264073,
"learning_rate": 6.890007416191209e-06,
"loss": 0.506,
"step": 530
},
{
"epoch": 0.44306418219461696,
"grad_norm": 2.162714599010301,
"learning_rate": 6.822857581558423e-06,
"loss": 0.4976,
"step": 535
},
{
"epoch": 0.4472049689440994,
"grad_norm": 2.1084852870864914,
"learning_rate": 6.7553263955907755e-06,
"loss": 0.4918,
"step": 540
},
{
"epoch": 0.45134575569358176,
"grad_norm": 2.094525435495713,
"learning_rate": 6.687427986165379e-06,
"loss": 0.4889,
"step": 545
},
{
"epoch": 0.4554865424430642,
"grad_norm": 2.2871720115590226,
"learning_rate": 6.6191765579844205e-06,
"loss": 0.4883,
"step": 550
},
{
"epoch": 0.45962732919254656,
"grad_norm": 2.2666748333234157,
"learning_rate": 6.550586389603451e-06,
"loss": 0.5001,
"step": 555
},
{
"epoch": 0.463768115942029,
"grad_norm": 2.1704201350055756,
"learning_rate": 6.481671830444243e-06,
"loss": 0.489,
"step": 560
},
{
"epoch": 0.46790890269151136,
"grad_norm": 2.462881842902182,
"learning_rate": 6.412447297792818e-06,
"loss": 0.4925,
"step": 565
},
{
"epoch": 0.4720496894409938,
"grad_norm": 2.2118284938147994,
"learning_rate": 6.3429272737832726e-06,
"loss": 0.4797,
"step": 570
},
{
"epoch": 0.47619047619047616,
"grad_norm": 2.271594346562813,
"learning_rate": 6.273126302368037e-06,
"loss": 0.4838,
"step": 575
},
{
"epoch": 0.4803312629399586,
"grad_norm": 2.031397121849161,
"learning_rate": 6.203058986275207e-06,
"loss": 0.4804,
"step": 580
},
{
"epoch": 0.484472049689441,
"grad_norm": 2.1980808821259012,
"learning_rate": 6.132739983953579e-06,
"loss": 0.4717,
"step": 585
},
{
"epoch": 0.4886128364389234,
"grad_norm": 2.1375563048782023,
"learning_rate": 6.062184006506027e-06,
"loss": 0.4755,
"step": 590
},
{
"epoch": 0.4927536231884058,
"grad_norm": 2.227143810312422,
"learning_rate": 5.991405814611855e-06,
"loss": 0.473,
"step": 595
},
{
"epoch": 0.4968944099378882,
"grad_norm": 2.1657589451163783,
"learning_rate": 5.920420215438794e-06,
"loss": 0.4689,
"step": 600
},
{
"epoch": 0.5010351966873706,
"grad_norm": 2.0699845464241298,
"learning_rate": 5.849242059545259e-06,
"loss": 0.459,
"step": 605
},
{
"epoch": 0.505175983436853,
"grad_norm": 2.070180087891007,
"learning_rate": 5.777886237773542e-06,
"loss": 0.4639,
"step": 610
},
{
"epoch": 0.5093167701863354,
"grad_norm": 2.141330722860632,
"learning_rate": 5.706367678134562e-06,
"loss": 0.451,
"step": 615
},
{
"epoch": 0.5134575569358178,
"grad_norm": 2.1761626984660825,
"learning_rate": 5.634701342684852e-06,
"loss": 0.467,
"step": 620
},
{
"epoch": 0.5175983436853002,
"grad_norm": 2.066437574044899,
"learning_rate": 5.562902224396416e-06,
"loss": 0.4511,
"step": 625
},
{
"epoch": 0.5217391304347826,
"grad_norm": 2.11017439615472,
"learning_rate": 5.49098534402012e-06,
"loss": 0.4613,
"step": 630
},
{
"epoch": 0.525879917184265,
"grad_norm": 2.0667579107692973,
"learning_rate": 5.418965746943281e-06,
"loss": 0.4552,
"step": 635
},
{
"epoch": 0.5300207039337475,
"grad_norm": 2.1203564953642684,
"learning_rate": 5.34685850004208e-06,
"loss": 0.4515,
"step": 640
},
{
"epoch": 0.5341614906832298,
"grad_norm": 2.1839775513140802,
"learning_rate": 5.2746786885295034e-06,
"loss": 0.4517,
"step": 645
},
{
"epoch": 0.5383022774327122,
"grad_norm": 2.0625277001105657,
"learning_rate": 5.2024414127994325e-06,
"loss": 0.448,
"step": 650
},
{
"epoch": 0.5424430641821946,
"grad_norm": 2.206189353998323,
"learning_rate": 5.13016178526756e-06,
"loss": 0.4526,
"step": 655
},
{
"epoch": 0.546583850931677,
"grad_norm": 2.095803088261813,
"learning_rate": 5.057854927209804e-06,
"loss": 0.4493,
"step": 660
},
{
"epoch": 0.5507246376811594,
"grad_norm": 2.02519967151739,
"learning_rate": 4.985535965598843e-06,
"loss": 0.4454,
"step": 665
},
{
"epoch": 0.5548654244306418,
"grad_norm": 2.0296722021624567,
"learning_rate": 4.913220029939491e-06,
"loss": 0.4358,
"step": 670
},
{
"epoch": 0.5590062111801242,
"grad_norm": 2.3083437897418473,
"learning_rate": 4.840922249103506e-06,
"loss": 0.4456,
"step": 675
},
{
"epoch": 0.5631469979296067,
"grad_norm": 2.0519867202746407,
"learning_rate": 4.7686577481645745e-06,
"loss": 0.431,
"step": 680
},
{
"epoch": 0.567287784679089,
"grad_norm": 2.1470126174135924,
"learning_rate": 4.696441645234042e-06,
"loss": 0.4379,
"step": 685
},
{
"epoch": 0.5714285714285714,
"grad_norm": 2.0560745378867846,
"learning_rate": 4.624289048298147e-06,
"loss": 0.443,
"step": 690
},
{
"epoch": 0.5755693581780539,
"grad_norm": 1.9569113027558405,
"learning_rate": 4.55221505205734e-06,
"loss": 0.4351,
"step": 695
},
{
"epoch": 0.5797101449275363,
"grad_norm": 2.0869788254714154,
"learning_rate": 4.480234734768393e-06,
"loss": 0.4263,
"step": 700
},
{
"epoch": 0.5838509316770186,
"grad_norm": 2.0909397682742883,
"learning_rate": 4.408363155089952e-06,
"loss": 0.4267,
"step": 705
},
{
"epoch": 0.587991718426501,
"grad_norm": 2.035030817366214,
"learning_rate": 4.3366153489321855e-06,
"loss": 0.4262,
"step": 710
},
{
"epoch": 0.5921325051759835,
"grad_norm": 2.081280914303008,
"learning_rate": 4.265006326311199e-06,
"loss": 0.4276,
"step": 715
},
{
"epoch": 0.5962732919254659,
"grad_norm": 2.2311910170648637,
"learning_rate": 4.1935510682088545e-06,
"loss": 0.4239,
"step": 720
},
{
"epoch": 0.6004140786749482,
"grad_norm": 2.089278016051515,
"learning_rate": 4.122264523438668e-06,
"loss": 0.4224,
"step": 725
},
{
"epoch": 0.6045548654244306,
"grad_norm": 1.924768903143278,
"learning_rate": 4.051161605518453e-06,
"loss": 0.4135,
"step": 730
},
{
"epoch": 0.6086956521739131,
"grad_norm": 2.1572030671513325,
"learning_rate": 3.980257189550316e-06,
"loss": 0.4162,
"step": 735
},
{
"epoch": 0.6128364389233955,
"grad_norm": 1.994411998332086,
"learning_rate": 3.909566109108727e-06,
"loss": 0.4133,
"step": 740
},
{
"epoch": 0.6169772256728778,
"grad_norm": 2.0520593778378626,
"learning_rate": 3.839103153137247e-06,
"loss": 0.4187,
"step": 745
},
{
"epoch": 0.6211180124223602,
"grad_norm": 2.091242798091324,
"learning_rate": 3.768883062854598e-06,
"loss": 0.4123,
"step": 750
},
{
"epoch": 0.6252587991718427,
"grad_norm": 2.0083055108666996,
"learning_rate": 3.6989205286707398e-06,
"loss": 0.4095,
"step": 755
},
{
"epoch": 0.629399585921325,
"grad_norm": 2.027716271226035,
"learning_rate": 3.6292301871135425e-06,
"loss": 0.4122,
"step": 760
},
{
"epoch": 0.6335403726708074,
"grad_norm": 2.187794954017901,
"learning_rate": 3.55982661776676e-06,
"loss": 0.3989,
"step": 765
},
{
"epoch": 0.6376811594202898,
"grad_norm": 2.063462092371761,
"learning_rate": 3.4907243402199013e-06,
"loss": 0.4085,
"step": 770
},
{
"epoch": 0.6418219461697723,
"grad_norm": 1.9854773033540876,
"learning_rate": 3.4219378110306523e-06,
"loss": 0.4073,
"step": 775
},
{
"epoch": 0.6459627329192547,
"grad_norm": 1.992830135581226,
"learning_rate": 3.353481420700495e-06,
"loss": 0.4016,
"step": 780
},
{
"epoch": 0.650103519668737,
"grad_norm": 2.0747311476669847,
"learning_rate": 3.285369490664133e-06,
"loss": 0.4021,
"step": 785
},
{
"epoch": 0.6542443064182195,
"grad_norm": 2.0479726659986803,
"learning_rate": 3.2176162702933816e-06,
"loss": 0.3988,
"step": 790
},
{
"epoch": 0.6583850931677019,
"grad_norm": 2.1244924225668793,
"learning_rate": 3.150235933916115e-06,
"loss": 0.391,
"step": 795
},
{
"epoch": 0.6625258799171843,
"grad_norm": 2.026867017023864,
"learning_rate": 3.0832425778509235e-06,
"loss": 0.3942,
"step": 800
},
{
"epoch": 0.6666666666666666,
"grad_norm": 2.0721889345390156,
"learning_rate": 3.0166502174581012e-06,
"loss": 0.3932,
"step": 805
},
{
"epoch": 0.6708074534161491,
"grad_norm": 2.076264432304557,
"learning_rate": 2.950472784207544e-06,
"loss": 0.3914,
"step": 810
},
{
"epoch": 0.6749482401656315,
"grad_norm": 1.9834852441387247,
"learning_rate": 2.8847241227642255e-06,
"loss": 0.3965,
"step": 815
},
{
"epoch": 0.6790890269151139,
"grad_norm": 2.1270225026333525,
"learning_rate": 2.819417988091814e-06,
"loss": 0.3955,
"step": 820
},
{
"epoch": 0.6832298136645962,
"grad_norm": 2.1736828836911117,
"learning_rate": 2.754568042575061e-06,
"loss": 0.3896,
"step": 825
},
{
"epoch": 0.6873706004140787,
"grad_norm": 2.0644181259123817,
"learning_rate": 2.6901878531615677e-06,
"loss": 0.3841,
"step": 830
},
{
"epoch": 0.6915113871635611,
"grad_norm": 2.0687716790652693,
"learning_rate": 2.6262908885235046e-06,
"loss": 0.3831,
"step": 835
},
{
"epoch": 0.6956521739130435,
"grad_norm": 2.0019086355999565,
"learning_rate": 2.5628905162398797e-06,
"loss": 0.3852,
"step": 840
},
{
"epoch": 0.6997929606625258,
"grad_norm": 2.092418141138653,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.3919,
"step": 845
},
{
"epoch": 0.7039337474120083,
"grad_norm": 1.9851562014074176,
"learning_rate": 2.4376324968286154e-06,
"loss": 0.3833,
"step": 850
},
{
"epoch": 0.7080745341614907,
"grad_norm": 2.0158181753004976,
"learning_rate": 2.375801054333409e-06,
"loss": 0.3818,
"step": 855
},
{
"epoch": 0.7122153209109731,
"grad_norm": 2.033514347950001,
"learning_rate": 2.3145186079753685e-06,
"loss": 0.3813,
"step": 860
},
{
"epoch": 0.7163561076604554,
"grad_norm": 2.18271145418854,
"learning_rate": 2.253797978362617e-06,
"loss": 0.3878,
"step": 865
},
{
"epoch": 0.7204968944099379,
"grad_norm": 2.0192627128869525,
"learning_rate": 2.193651868568285e-06,
"loss": 0.3846,
"step": 870
},
{
"epoch": 0.7246376811594203,
"grad_norm": 2.0678978350650024,
"learning_rate": 2.1340928614729445e-06,
"loss": 0.372,
"step": 875
},
{
"epoch": 0.7287784679089027,
"grad_norm": 1.9906640969676856,
"learning_rate": 2.075133417132223e-06,
"loss": 0.3736,
"step": 880
},
{
"epoch": 0.7329192546583851,
"grad_norm": 1.9462665473651348,
"learning_rate": 2.016785870170079e-06,
"loss": 0.3693,
"step": 885
},
{
"epoch": 0.7370600414078675,
"grad_norm": 2.010463061512907,
"learning_rate": 1.9590624271983406e-06,
"loss": 0.3692,
"step": 890
},
{
"epoch": 0.7412008281573499,
"grad_norm": 2.0531402948964432,
"learning_rate": 1.9019751642630252e-06,
"loss": 0.37,
"step": 895
},
{
"epoch": 0.7453416149068323,
"grad_norm": 2.2018575972936723,
"learning_rate": 1.8455360243179537e-06,
"loss": 0.3748,
"step": 900
},
{
"epoch": 0.7494824016563147,
"grad_norm": 2.0239016138022468,
"learning_rate": 1.7897568147262323e-06,
"loss": 0.3663,
"step": 905
},
{
"epoch": 0.7536231884057971,
"grad_norm": 2.101223919549933,
"learning_rate": 1.7346492047900897e-06,
"loss": 0.363,
"step": 910
},
{
"epoch": 0.7577639751552795,
"grad_norm": 2.0063562256177945,
"learning_rate": 1.6802247233095914e-06,
"loss": 0.3656,
"step": 915
},
{
"epoch": 0.7619047619047619,
"grad_norm": 2.0140033980586334,
"learning_rate": 1.626494756170765e-06,
"loss": 0.3613,
"step": 920
},
{
"epoch": 0.7660455486542443,
"grad_norm": 2.0970932926638883,
"learning_rate": 1.5734705439636017e-06,
"loss": 0.3636,
"step": 925
},
{
"epoch": 0.7701863354037267,
"grad_norm": 2.0036334089923176,
"learning_rate": 1.5211631796304721e-06,
"loss": 0.3646,
"step": 930
},
{
"epoch": 0.7743271221532091,
"grad_norm": 1.9781895967841074,
"learning_rate": 1.46958360614543e-06,
"loss": 0.3664,
"step": 935
},
{
"epoch": 0.7784679089026915,
"grad_norm": 1.9568011172776334,
"learning_rate": 1.4187426142248723e-06,
"loss": 0.3591,
"step": 940
},
{
"epoch": 0.782608695652174,
"grad_norm": 2.0688381865589096,
"learning_rate": 1.3686508400700787e-06,
"loss": 0.3647,
"step": 945
},
{
"epoch": 0.7867494824016563,
"grad_norm": 2.0663898407393844,
"learning_rate": 1.3193187631420462e-06,
"loss": 0.3492,
"step": 950
},
{
"epoch": 0.7908902691511387,
"grad_norm": 2.055621288270255,
"learning_rate": 1.2707567039691505e-06,
"loss": 0.3527,
"step": 955
},
{
"epoch": 0.7950310559006211,
"grad_norm": 1.9925441732484688,
"learning_rate": 1.222974821988024e-06,
"loss": 0.3573,
"step": 960
},
{
"epoch": 0.7991718426501035,
"grad_norm": 2.062467657748088,
"learning_rate": 1.1759831134181504e-06,
"loss": 0.3601,
"step": 965
},
{
"epoch": 0.8033126293995859,
"grad_norm": 2.011075463880814,
"learning_rate": 1.1297914091706086e-06,
"loss": 0.3536,
"step": 970
},
{
"epoch": 0.8074534161490683,
"grad_norm": 2.0637149859980264,
"learning_rate": 1.0844093727913868e-06,
"loss": 0.3595,
"step": 975
},
{
"epoch": 0.8115942028985508,
"grad_norm": 1.9523568935411413,
"learning_rate": 1.039846498439727e-06,
"loss": 0.3563,
"step": 980
},
{
"epoch": 0.8157349896480331,
"grad_norm": 2.061998469109606,
"learning_rate": 9.961121089018933e-07,
"loss": 0.3523,
"step": 985
},
{
"epoch": 0.8198757763975155,
"grad_norm": 2.0466038173212633,
"learning_rate": 9.532153536407923e-07,
"loss": 0.3525,
"step": 990
},
{
"epoch": 0.8240165631469979,
"grad_norm": 2.051264847475936,
"learning_rate": 9.111652068818621e-07,
"loss": 0.3455,
"step": 995
},
{
"epoch": 0.8281573498964804,
"grad_norm": 2.110221360013133,
"learning_rate": 8.699704657356195e-07,
"loss": 0.3369,
"step": 1000
},
{
"epoch": 0.8322981366459627,
"grad_norm": 1.974268048949072,
"learning_rate": 8.296397483572515e-07,
"loss": 0.3473,
"step": 1005
},
{
"epoch": 0.8364389233954451,
"grad_norm": 2.1102185973268663,
"learning_rate": 7.901814921436624e-07,
"loss": 0.349,
"step": 1010
},
{
"epoch": 0.8405797101449275,
"grad_norm": 2.088942249307371,
"learning_rate": 7.516039519683105e-07,
"loss": 0.3506,
"step": 1015
},
{
"epoch": 0.84472049689441,
"grad_norm": 2.0172621716492385,
"learning_rate": 7.139151984542636e-07,
"loss": 0.3479,
"step": 1020
},
{
"epoch": 0.8488612836438924,
"grad_norm": 2.061272723188888,
"learning_rate": 6.771231162857722e-07,
"loss": 0.35,
"step": 1025
},
{
"epoch": 0.8530020703933747,
"grad_norm": 2.042641769558922,
"learning_rate": 6.412354025587509e-07,
"loss": 0.3419,
"step": 1030
},
{
"epoch": 0.8571428571428571,
"grad_norm": 2.107999860141605,
"learning_rate": 6.062595651705111e-07,
"loss": 0.3455,
"step": 1035
},
{
"epoch": 0.8612836438923396,
"grad_norm": 2.0760294798992507,
"learning_rate": 5.722029212490666e-07,
"loss": 0.3521,
"step": 1040
},
{
"epoch": 0.865424430641822,
"grad_norm": 2.0428220870108116,
"learning_rate": 5.390725956223531e-07,
"loss": 0.3422,
"step": 1045
},
{
"epoch": 0.8695652173913043,
"grad_norm": 2.001246350332589,
"learning_rate": 5.068755193276798e-07,
"loss": 0.3455,
"step": 1050
},
{
"epoch": 0.8737060041407867,
"grad_norm": 2.099442444548102,
"learning_rate": 4.756184281617121e-07,
"loss": 0.3405,
"step": 1055
},
{
"epoch": 0.8778467908902692,
"grad_norm": 2.13132891801723,
"learning_rate": 4.4530786127131575e-07,
"loss": 0.334,
"step": 1060
},
{
"epoch": 0.8819875776397516,
"grad_norm": 2.0329484032440766,
"learning_rate": 4.159501597855287e-07,
"loss": 0.3419,
"step": 1065
},
{
"epoch": 0.8861283643892339,
"grad_norm": 2.159379949424514,
"learning_rate": 3.8755146548896784e-07,
"loss": 0.3418,
"step": 1070
},
{
"epoch": 0.8902691511387164,
"grad_norm": 2.040568384317151,
"learning_rate": 3.6011771953693044e-07,
"loss": 0.3395,
"step": 1075
},
{
"epoch": 0.8944099378881988,
"grad_norm": 2.120632936565463,
"learning_rate": 3.336546612124758e-07,
"loss": 0.3401,
"step": 1080
},
{
"epoch": 0.8985507246376812,
"grad_norm": 2.09791384766389,
"learning_rate": 3.081678267257404e-07,
"loss": 0.3468,
"step": 1085
},
{
"epoch": 0.9026915113871635,
"grad_norm": 2.0437408757508924,
"learning_rate": 2.836625480557265e-07,
"loss": 0.3368,
"step": 1090
},
{
"epoch": 0.906832298136646,
"grad_norm": 2.083527928278351,
"learning_rate": 2.601439518348331e-07,
"loss": 0.3466,
"step": 1095
},
{
"epoch": 0.9109730848861284,
"grad_norm": 2.150766971163671,
"learning_rate": 2.376169582763288e-07,
"loss": 0.3433,
"step": 1100
},
{
"epoch": 0.9151138716356108,
"grad_norm": 1.9684937329053864,
"learning_rate": 2.1608628014502364e-07,
"loss": 0.3417,
"step": 1105
},
{
"epoch": 0.9192546583850931,
"grad_norm": 2.0342367177559817,
"learning_rate": 1.955564217713335e-07,
"loss": 0.3435,
"step": 1110
},
{
"epoch": 0.9233954451345756,
"grad_norm": 2.0540139915838815,
"learning_rate": 1.7603167810894662e-07,
"loss": 0.337,
"step": 1115
},
{
"epoch": 0.927536231884058,
"grad_norm": 2.006652780971983,
"learning_rate": 1.5751613383630128e-07,
"loss": 0.3322,
"step": 1120
},
{
"epoch": 0.9316770186335404,
"grad_norm": 2.0124326674054562,
"learning_rate": 1.4001366250204762e-07,
"loss": 0.3334,
"step": 1125
},
{
"epoch": 0.9358178053830227,
"grad_norm": 2.1136467451791168,
"learning_rate": 1.235279257146804e-07,
"loss": 0.3343,
"step": 1130
},
{
"epoch": 0.9399585921325052,
"grad_norm": 2.0186906193155782,
"learning_rate": 1.080623723765134e-07,
"loss": 0.3362,
"step": 1135
},
{
"epoch": 0.9440993788819876,
"grad_norm": 2.0844444504570165,
"learning_rate": 9.362023796215036e-08,
"loss": 0.3365,
"step": 1140
},
{
"epoch": 0.94824016563147,
"grad_norm": 2.073306988937045,
"learning_rate": 8.020454384160437e-08,
"loss": 0.3294,
"step": 1145
},
{
"epoch": 0.9523809523809523,
"grad_norm": 2.0748770702565755,
"learning_rate": 6.78180966482156e-08,
"loss": 0.3399,
"step": 1150
},
{
"epoch": 0.9565217391304348,
"grad_norm": 1.9873613320972088,
"learning_rate": 5.646348769148491e-08,
"loss": 0.331,
"step": 1155
},
{
"epoch": 0.9606625258799172,
"grad_norm": 2.0532519601789194,
"learning_rate": 4.6143092414961396e-08,
"loss": 0.3359,
"step": 1160
},
{
"epoch": 0.9648033126293996,
"grad_norm": 2.294030777096881,
"learning_rate": 3.685906989928656e-08,
"loss": 0.3305,
"step": 1165
},
{
"epoch": 0.968944099378882,
"grad_norm": 2.292130787520844,
"learning_rate": 2.861336241050061e-08,
"loss": 0.3419,
"step": 1170
},
{
"epoch": 0.9730848861283644,
"grad_norm": 2.057089249921181,
"learning_rate": 2.1407694993714755e-08,
"loss": 0.3321,
"step": 1175
},
{
"epoch": 0.9772256728778468,
"grad_norm": 2.0761794810893375,
"learning_rate": 1.5243575112218744e-08,
"loss": 0.336,
"step": 1180
},
{
"epoch": 0.9813664596273292,
"grad_norm": 2.054973660726792,
"learning_rate": 1.0122292332114814e-08,
"loss": 0.3386,
"step": 1185
},
{
"epoch": 0.9855072463768116,
"grad_norm": 2.0266934387654163,
"learning_rate": 6.044918052531268e-09,
"loss": 0.3347,
"step": 1190
},
{
"epoch": 0.989648033126294,
"grad_norm": 2.0133284950403927,
"learning_rate": 3.0123052814812203e-09,
"loss": 0.3356,
"step": 1195
},
{
"epoch": 0.9937888198757764,
"grad_norm": 2.106679276632288,
"learning_rate": 1.025088457409229e-09,
"loss": 0.3369,
"step": 1200
},
{
"epoch": 0.9979296066252588,
"grad_norm": 1.9986369917882558,
"learning_rate": 8.368331646302353e-11,
"loss": 0.3466,
"step": 1205
},
{
"epoch": 0.9995859213250518,
"eval_loss": 0.33997195959091187,
"eval_runtime": 125.5313,
"eval_samples_per_second": 3.107,
"eval_steps_per_second": 0.781,
"step": 1207
},
{
"epoch": 0.9995859213250518,
"step": 1207,
"total_flos": 252668899491840.0,
"train_loss": 0.5242949779505758,
"train_runtime": 27497.765,
"train_samples_per_second": 1.405,
"train_steps_per_second": 0.044
}
],
"logging_steps": 5,
"max_steps": 1207,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 252668899491840.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}