{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1869,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0005350454788657035,
"grad_norm": 23.13200439587652,
"learning_rate": 5.3475935828877005e-08,
"loss": 1.3189,
"step": 1
},
{
"epoch": 0.002675227394328518,
"grad_norm": 23.131726930375823,
"learning_rate": 2.6737967914438503e-07,
"loss": 1.353,
"step": 5
},
{
"epoch": 0.005350454788657036,
"grad_norm": 15.570549737683681,
"learning_rate": 5.347593582887701e-07,
"loss": 1.3012,
"step": 10
},
{
"epoch": 0.008025682182985553,
"grad_norm": 11.780171593441946,
"learning_rate": 8.021390374331551e-07,
"loss": 1.1663,
"step": 15
},
{
"epoch": 0.010700909577314071,
"grad_norm": 9.533862762103848,
"learning_rate": 1.0695187165775401e-06,
"loss": 1.0421,
"step": 20
},
{
"epoch": 0.01337613697164259,
"grad_norm": 3.4501445250164315,
"learning_rate": 1.3368983957219254e-06,
"loss": 0.9508,
"step": 25
},
{
"epoch": 0.016051364365971106,
"grad_norm": 3.498813588083818,
"learning_rate": 1.6042780748663103e-06,
"loss": 0.9231,
"step": 30
},
{
"epoch": 0.018726591760299626,
"grad_norm": 2.956338187273217,
"learning_rate": 1.8716577540106954e-06,
"loss": 0.8724,
"step": 35
},
{
"epoch": 0.021401819154628143,
"grad_norm": 3.065203880168454,
"learning_rate": 2.1390374331550802e-06,
"loss": 0.8728,
"step": 40
},
{
"epoch": 0.024077046548956663,
"grad_norm": 2.9237458529793954,
"learning_rate": 2.4064171122994653e-06,
"loss": 0.8521,
"step": 45
},
{
"epoch": 0.02675227394328518,
"grad_norm": 2.957239105890816,
"learning_rate": 2.673796791443851e-06,
"loss": 0.8408,
"step": 50
},
{
"epoch": 0.029427501337613696,
"grad_norm": 2.9820050017245925,
"learning_rate": 2.9411764705882355e-06,
"loss": 0.833,
"step": 55
},
{
"epoch": 0.03210272873194221,
"grad_norm": 3.11922520269704,
"learning_rate": 3.2085561497326205e-06,
"loss": 0.8281,
"step": 60
},
{
"epoch": 0.034777956126270736,
"grad_norm": 3.207728032321709,
"learning_rate": 3.4759358288770056e-06,
"loss": 0.8167,
"step": 65
},
{
"epoch": 0.03745318352059925,
"grad_norm": 3.670993317978224,
"learning_rate": 3.7433155080213907e-06,
"loss": 0.7982,
"step": 70
},
{
"epoch": 0.04012841091492777,
"grad_norm": 3.0341227337849017,
"learning_rate": 4.010695187165775e-06,
"loss": 0.7745,
"step": 75
},
{
"epoch": 0.042803638309256285,
"grad_norm": 3.2283761989699022,
"learning_rate": 4.2780748663101604e-06,
"loss": 0.7762,
"step": 80
},
{
"epoch": 0.0454788657035848,
"grad_norm": 3.0101028493521587,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.7701,
"step": 85
},
{
"epoch": 0.048154093097913325,
"grad_norm": 3.293912596297704,
"learning_rate": 4.812834224598931e-06,
"loss": 0.7658,
"step": 90
},
{
"epoch": 0.05082932049224184,
"grad_norm": 3.2095293977680455,
"learning_rate": 5.0802139037433165e-06,
"loss": 0.7601,
"step": 95
},
{
"epoch": 0.05350454788657036,
"grad_norm": 3.0778685186320462,
"learning_rate": 5.347593582887702e-06,
"loss": 0.7563,
"step": 100
},
{
"epoch": 0.056179775280898875,
"grad_norm": 2.9974705888918654,
"learning_rate": 5.614973262032086e-06,
"loss": 0.7551,
"step": 105
},
{
"epoch": 0.05885500267522739,
"grad_norm": 2.9929536119511244,
"learning_rate": 5.882352941176471e-06,
"loss": 0.7308,
"step": 110
},
{
"epoch": 0.061530230069555915,
"grad_norm": 3.1188876697918513,
"learning_rate": 6.149732620320856e-06,
"loss": 0.7355,
"step": 115
},
{
"epoch": 0.06420545746388442,
"grad_norm": 2.98515515719028,
"learning_rate": 6.417112299465241e-06,
"loss": 0.7361,
"step": 120
},
{
"epoch": 0.06688068485821295,
"grad_norm": 3.6014756995394075,
"learning_rate": 6.684491978609626e-06,
"loss": 0.731,
"step": 125
},
{
"epoch": 0.06955591225254147,
"grad_norm": 2.976445643033122,
"learning_rate": 6.951871657754011e-06,
"loss": 0.7304,
"step": 130
},
{
"epoch": 0.07223113964686999,
"grad_norm": 3.7039824116697084,
"learning_rate": 7.219251336898396e-06,
"loss": 0.7194,
"step": 135
},
{
"epoch": 0.0749063670411985,
"grad_norm": 2.9021865436733627,
"learning_rate": 7.486631016042781e-06,
"loss": 0.7307,
"step": 140
},
{
"epoch": 0.07758159443552702,
"grad_norm": 2.953832607507296,
"learning_rate": 7.754010695187166e-06,
"loss": 0.7162,
"step": 145
},
{
"epoch": 0.08025682182985554,
"grad_norm": 2.8372158291049225,
"learning_rate": 8.02139037433155e-06,
"loss": 0.7087,
"step": 150
},
{
"epoch": 0.08293204922418405,
"grad_norm": 2.8668974273540666,
"learning_rate": 8.288770053475937e-06,
"loss": 0.7261,
"step": 155
},
{
"epoch": 0.08560727661851257,
"grad_norm": 2.8860481749783777,
"learning_rate": 8.556149732620321e-06,
"loss": 0.7312,
"step": 160
},
{
"epoch": 0.08828250401284109,
"grad_norm": 2.8016774460351095,
"learning_rate": 8.823529411764707e-06,
"loss": 0.7123,
"step": 165
},
{
"epoch": 0.0909577314071696,
"grad_norm": 2.937187072578004,
"learning_rate": 9.090909090909091e-06,
"loss": 0.7157,
"step": 170
},
{
"epoch": 0.09363295880149813,
"grad_norm": 3.01300045316444,
"learning_rate": 9.358288770053477e-06,
"loss": 0.7201,
"step": 175
},
{
"epoch": 0.09630818619582665,
"grad_norm": 2.904402672960343,
"learning_rate": 9.625668449197861e-06,
"loss": 0.6978,
"step": 180
},
{
"epoch": 0.09898341359015517,
"grad_norm": 2.8091189009166,
"learning_rate": 9.893048128342247e-06,
"loss": 0.7274,
"step": 185
},
{
"epoch": 0.10165864098448368,
"grad_norm": 2.7277794624078626,
"learning_rate": 9.999921507322408e-06,
"loss": 0.7068,
"step": 190
},
{
"epoch": 0.1043338683788122,
"grad_norm": 2.885891896010807,
"learning_rate": 9.999441838772916e-06,
"loss": 0.694,
"step": 195
},
{
"epoch": 0.10700909577314072,
"grad_norm": 2.842426519391661,
"learning_rate": 9.99852615049999e-06,
"loss": 0.7058,
"step": 200
},
{
"epoch": 0.10968432316746923,
"grad_norm": 2.900632234819327,
"learning_rate": 9.997174522364177e-06,
"loss": 0.7158,
"step": 205
},
{
"epoch": 0.11235955056179775,
"grad_norm": 2.738085044583963,
"learning_rate": 9.995387072245939e-06,
"loss": 0.71,
"step": 210
},
{
"epoch": 0.11503477795612627,
"grad_norm": 2.6019029997577166,
"learning_rate": 9.993163956035381e-06,
"loss": 0.7083,
"step": 215
},
{
"epoch": 0.11771000535045478,
"grad_norm": 2.6518352891775545,
"learning_rate": 9.990505367618647e-06,
"loss": 0.7149,
"step": 220
},
{
"epoch": 0.12038523274478331,
"grad_norm": 2.698583720906861,
"learning_rate": 9.987411538861023e-06,
"loss": 0.7105,
"step": 225
},
{
"epoch": 0.12306046013911183,
"grad_norm": 2.856141657756592,
"learning_rate": 9.9838827395867e-06,
"loss": 0.6957,
"step": 230
},
{
"epoch": 0.12573568753344033,
"grad_norm": 2.620810413725912,
"learning_rate": 9.979919277555247e-06,
"loss": 0.7048,
"step": 235
},
{
"epoch": 0.12841091492776885,
"grad_norm": 2.696743807569947,
"learning_rate": 9.97552149843478e-06,
"loss": 0.7151,
"step": 240
},
{
"epoch": 0.13108614232209737,
"grad_norm": 2.533312623679507,
"learning_rate": 9.970689785771798e-06,
"loss": 0.6878,
"step": 245
},
{
"epoch": 0.1337613697164259,
"grad_norm": 2.5874033849530655,
"learning_rate": 9.96542456095775e-06,
"loss": 0.686,
"step": 250
},
{
"epoch": 0.13643659711075443,
"grad_norm": 2.9931437945152792,
"learning_rate": 9.95972628319227e-06,
"loss": 0.683,
"step": 255
},
{
"epoch": 0.13911182450508294,
"grad_norm": 2.747387136082782,
"learning_rate": 9.953595449443134e-06,
"loss": 0.7074,
"step": 260
},
{
"epoch": 0.14178705189941146,
"grad_norm": 2.752451707047693,
"learning_rate": 9.947032594402917e-06,
"loss": 0.6836,
"step": 265
},
{
"epoch": 0.14446227929373998,
"grad_norm": 2.6178088583593198,
"learning_rate": 9.940038290442362e-06,
"loss": 0.6789,
"step": 270
},
{
"epoch": 0.1471375066880685,
"grad_norm": 2.7161306376522747,
"learning_rate": 9.932613147560464e-06,
"loss": 0.6958,
"step": 275
},
{
"epoch": 0.149812734082397,
"grad_norm": 2.4738183226635697,
"learning_rate": 9.924757813331256e-06,
"loss": 0.6845,
"step": 280
},
{
"epoch": 0.15248796147672553,
"grad_norm": 2.7377708715326157,
"learning_rate": 9.916472972847353e-06,
"loss": 0.6781,
"step": 285
},
{
"epoch": 0.15516318887105404,
"grad_norm": 2.6891393358747044,
"learning_rate": 9.907759348660186e-06,
"loss": 0.6883,
"step": 290
},
{
"epoch": 0.15783841626538256,
"grad_norm": 2.683005246024391,
"learning_rate": 9.898617700716988e-06,
"loss": 0.6648,
"step": 295
},
{
"epoch": 0.16051364365971107,
"grad_norm": 2.6446975921568963,
"learning_rate": 9.889048826294527e-06,
"loss": 0.6794,
"step": 300
},
{
"epoch": 0.1631888710540396,
"grad_norm": 2.529063997203592,
"learning_rate": 9.879053559929556e-06,
"loss": 0.6878,
"step": 305
},
{
"epoch": 0.1658640984483681,
"grad_norm": 2.565586133254063,
"learning_rate": 9.868632773346044e-06,
"loss": 0.674,
"step": 310
},
{
"epoch": 0.16853932584269662,
"grad_norm": 2.7274242462745075,
"learning_rate": 9.857787375379144e-06,
"loss": 0.663,
"step": 315
},
{
"epoch": 0.17121455323702514,
"grad_norm": 2.848595901483633,
"learning_rate": 9.84651831189593e-06,
"loss": 0.6673,
"step": 320
},
{
"epoch": 0.17388978063135366,
"grad_norm": 2.4034423483535523,
"learning_rate": 9.834826565712901e-06,
"loss": 0.6609,
"step": 325
},
{
"epoch": 0.17656500802568217,
"grad_norm": 2.4085233120198573,
"learning_rate": 9.822713156510278e-06,
"loss": 0.6597,
"step": 330
},
{
"epoch": 0.1792402354200107,
"grad_norm": 3.7596384799134372,
"learning_rate": 9.81017914074306e-06,
"loss": 0.6606,
"step": 335
},
{
"epoch": 0.1819154628143392,
"grad_norm": 2.594817078575039,
"learning_rate": 9.797225611548896e-06,
"loss": 0.6609,
"step": 340
},
{
"epoch": 0.18459069020866772,
"grad_norm": 2.424753080798461,
"learning_rate": 9.783853698652737e-06,
"loss": 0.6718,
"step": 345
},
{
"epoch": 0.18726591760299627,
"grad_norm": 2.2931332375578264,
"learning_rate": 9.770064568268329e-06,
"loss": 0.6557,
"step": 350
},
{
"epoch": 0.18994114499732478,
"grad_norm": 2.7324537556861426,
"learning_rate": 9.75585942299648e-06,
"loss": 0.6643,
"step": 355
},
{
"epoch": 0.1926163723916533,
"grad_norm": 2.683596038746053,
"learning_rate": 9.741239501720197e-06,
"loss": 0.6426,
"step": 360
},
{
"epoch": 0.19529159978598182,
"grad_norm": 2.448519765302167,
"learning_rate": 9.726206079496619e-06,
"loss": 0.671,
"step": 365
},
{
"epoch": 0.19796682718031033,
"grad_norm": 2.2927485212185617,
"learning_rate": 9.71076046744583e-06,
"loss": 0.6467,
"step": 370
},
{
"epoch": 0.20064205457463885,
"grad_norm": 2.660371307478689,
"learning_rate": 9.694904012636509e-06,
"loss": 0.6594,
"step": 375
},
{
"epoch": 0.20331728196896737,
"grad_norm": 2.7797683565641274,
"learning_rate": 9.678638097968435e-06,
"loss": 0.657,
"step": 380
},
{
"epoch": 0.20599250936329588,
"grad_norm": 2.402469238834302,
"learning_rate": 9.661964142051896e-06,
"loss": 0.6405,
"step": 385
},
{
"epoch": 0.2086677367576244,
"grad_norm": 2.654523561437982,
"learning_rate": 9.644883599083959e-06,
"loss": 0.6418,
"step": 390
},
{
"epoch": 0.21134296415195292,
"grad_norm": 2.405967933532104,
"learning_rate": 9.627397958721638e-06,
"loss": 0.6524,
"step": 395
},
{
"epoch": 0.21401819154628143,
"grad_norm": 2.578503172107829,
"learning_rate": 9.609508745951988e-06,
"loss": 0.6311,
"step": 400
},
{
"epoch": 0.21669341894060995,
"grad_norm": 2.3774846639791254,
"learning_rate": 9.591217520959095e-06,
"loss": 0.6467,
"step": 405
},
{
"epoch": 0.21936864633493847,
"grad_norm": 2.3966771491810364,
"learning_rate": 9.572525878988014e-06,
"loss": 0.6407,
"step": 410
},
{
"epoch": 0.22204387372926698,
"grad_norm": 2.480703354243811,
"learning_rate": 9.55343545020564e-06,
"loss": 0.6273,
"step": 415
},
{
"epoch": 0.2247191011235955,
"grad_norm": 2.5200505400548785,
"learning_rate": 9.533947899558521e-06,
"loss": 0.6312,
"step": 420
},
{
"epoch": 0.22739432851792402,
"grad_norm": 2.423216275937368,
"learning_rate": 9.514064926627684e-06,
"loss": 0.6335,
"step": 425
},
{
"epoch": 0.23006955591225253,
"grad_norm": 2.4278234356168213,
"learning_rate": 9.49378826548037e-06,
"loss": 0.6365,
"step": 430
},
{
"epoch": 0.23274478330658105,
"grad_norm": 2.4619687653717808,
"learning_rate": 9.473119684518834e-06,
"loss": 0.6185,
"step": 435
},
{
"epoch": 0.23542001070090957,
"grad_norm": 2.7848123604276958,
"learning_rate": 9.452060986326088e-06,
"loss": 0.6397,
"step": 440
},
{
"epoch": 0.23809523809523808,
"grad_norm": 2.3079903944247877,
"learning_rate": 9.430614007508712e-06,
"loss": 0.6387,
"step": 445
},
{
"epoch": 0.24077046548956663,
"grad_norm": 2.8513254996307382,
"learning_rate": 9.408780618536664e-06,
"loss": 0.6223,
"step": 450
},
{
"epoch": 0.24344569288389514,
"grad_norm": 2.373399909841745,
"learning_rate": 9.386562723580155e-06,
"loss": 0.6181,
"step": 455
},
{
"epoch": 0.24612092027822366,
"grad_norm": 2.57339566672756,
"learning_rate": 9.363962260343577e-06,
"loss": 0.6185,
"step": 460
},
{
"epoch": 0.24879614767255218,
"grad_norm": 2.580832141839105,
"learning_rate": 9.340981199896515e-06,
"loss": 0.6425,
"step": 465
},
{
"epoch": 0.25147137506688066,
"grad_norm": 2.555524686470341,
"learning_rate": 9.317621546501827e-06,
"loss": 0.6345,
"step": 470
},
{
"epoch": 0.2541466024612092,
"grad_norm": 2.4749404523920098,
"learning_rate": 9.293885337440869e-06,
"loss": 0.618,
"step": 475
},
{
"epoch": 0.2568218298555377,
"grad_norm": 2.4591400848474967,
"learning_rate": 9.26977464283579e-06,
"loss": 0.6289,
"step": 480
},
{
"epoch": 0.2594970572498662,
"grad_norm": 2.477210647670721,
"learning_rate": 9.245291565469007e-06,
"loss": 0.6141,
"step": 485
},
{
"epoch": 0.26217228464419473,
"grad_norm": 2.5183875769960915,
"learning_rate": 9.220438240599813e-06,
"loss": 0.6192,
"step": 490
},
{
"epoch": 0.26484751203852325,
"grad_norm": 2.3426820656477636,
"learning_rate": 9.19521683577814e-06,
"loss": 0.6246,
"step": 495
},
{
"epoch": 0.2675227394328518,
"grad_norm": 2.3239660430688427,
"learning_rate": 9.169629550655532e-06,
"loss": 0.6062,
"step": 500
},
{
"epoch": 0.27019796682718034,
"grad_norm": 2.558515798335123,
"learning_rate": 9.143678616793299e-06,
"loss": 0.6016,
"step": 505
},
{
"epoch": 0.27287319422150885,
"grad_norm": 2.6429412615760293,
"learning_rate": 9.117366297467899e-06,
"loss": 0.594,
"step": 510
},
{
"epoch": 0.27554842161583737,
"grad_norm": 2.4212436453572024,
"learning_rate": 9.090694887473539e-06,
"loss": 0.6031,
"step": 515
},
{
"epoch": 0.2782236490101659,
"grad_norm": 2.666132053105893,
"learning_rate": 9.063666712922054e-06,
"loss": 0.5941,
"step": 520
},
{
"epoch": 0.2808988764044944,
"grad_norm": 2.4639957786059488,
"learning_rate": 9.036284131040027e-06,
"loss": 0.6222,
"step": 525
},
{
"epoch": 0.2835741037988229,
"grad_norm": 2.4732688614438287,
"learning_rate": 9.008549529963202e-06,
"loss": 0.6015,
"step": 530
},
{
"epoch": 0.28624933119315144,
"grad_norm": 2.8218623731230976,
"learning_rate": 8.98046532852822e-06,
"loss": 0.6059,
"step": 535
},
{
"epoch": 0.28892455858747995,
"grad_norm": 2.617571163199572,
"learning_rate": 8.952033976061651e-06,
"loss": 0.5955,
"step": 540
},
{
"epoch": 0.29159978598180847,
"grad_norm": 2.4037441824673302,
"learning_rate": 8.923257952166391e-06,
"loss": 0.5857,
"step": 545
},
{
"epoch": 0.294275013376137,
"grad_norm": 2.553036035226681,
"learning_rate": 8.894139766505391e-06,
"loss": 0.5853,
"step": 550
},
{
"epoch": 0.2969502407704655,
"grad_norm": 2.40802811193367,
"learning_rate": 8.864681958582795e-06,
"loss": 0.6091,
"step": 555
},
{
"epoch": 0.299625468164794,
"grad_norm": 2.5044609734550742,
"learning_rate": 8.834887097522452e-06,
"loss": 0.5952,
"step": 560
},
{
"epoch": 0.30230069555912253,
"grad_norm": 2.3055364533711207,
"learning_rate": 8.80475778184386e-06,
"loss": 0.5858,
"step": 565
},
{
"epoch": 0.30497592295345105,
"grad_norm": 2.607563664009288,
"learning_rate": 8.774296639235527e-06,
"loss": 0.5974,
"step": 570
},
{
"epoch": 0.30765115034777957,
"grad_norm": 2.422746238967634,
"learning_rate": 8.743506326325814e-06,
"loss": 0.5757,
"step": 575
},
{
"epoch": 0.3103263777421081,
"grad_norm": 2.487900310698381,
"learning_rate": 8.712389528451236e-06,
"loss": 0.5906,
"step": 580
},
{
"epoch": 0.3130016051364366,
"grad_norm": 2.3479811348664574,
"learning_rate": 8.680948959422266e-06,
"loss": 0.5891,
"step": 585
},
{
"epoch": 0.3156768325307651,
"grad_norm": 2.5079111416998874,
"learning_rate": 8.649187361286641e-06,
"loss": 0.5684,
"step": 590
},
{
"epoch": 0.31835205992509363,
"grad_norm": 2.517413693590494,
"learning_rate": 8.617107504090239e-06,
"loss": 0.5818,
"step": 595
},
{
"epoch": 0.32102728731942215,
"grad_norm": 2.4647569911383416,
"learning_rate": 8.584712185635477e-06,
"loss": 0.5753,
"step": 600
},
{
"epoch": 0.32370251471375067,
"grad_norm": 2.417420337870755,
"learning_rate": 8.552004231237308e-06,
"loss": 0.5968,
"step": 605
},
{
"epoch": 0.3263777421080792,
"grad_norm": 2.3381874660970334,
"learning_rate": 8.518986493476819e-06,
"loss": 0.5623,
"step": 610
},
{
"epoch": 0.3290529695024077,
"grad_norm": 2.349848498992065,
"learning_rate": 8.485661851952443e-06,
"loss": 0.576,
"step": 615
},
{
"epoch": 0.3317281968967362,
"grad_norm": 2.4141145502524655,
"learning_rate": 8.452033213028822e-06,
"loss": 0.5822,
"step": 620
},
{
"epoch": 0.33440342429106473,
"grad_norm": 2.5070115666403936,
"learning_rate": 8.418103509583323e-06,
"loss": 0.5711,
"step": 625
},
{
"epoch": 0.33707865168539325,
"grad_norm": 2.4311984012535457,
"learning_rate": 8.383875700750272e-06,
"loss": 0.5612,
"step": 630
},
{
"epoch": 0.33975387907972177,
"grad_norm": 2.3724735367354084,
"learning_rate": 8.349352771662848e-06,
"loss": 0.5769,
"step": 635
},
{
"epoch": 0.3424291064740503,
"grad_norm": 2.2940664533295543,
"learning_rate": 8.314537733192762e-06,
"loss": 0.5574,
"step": 640
},
{
"epoch": 0.3451043338683788,
"grad_norm": 2.699187783114433,
"learning_rate": 8.279433621687658e-06,
"loss": 0.5694,
"step": 645
},
{
"epoch": 0.3477795612627073,
"grad_norm": 2.318135627329529,
"learning_rate": 8.2440434987063e-06,
"loss": 0.5685,
"step": 650
},
{
"epoch": 0.35045478865703583,
"grad_norm": 2.44893089284664,
"learning_rate": 8.208370450751568e-06,
"loss": 0.5461,
"step": 655
},
{
"epoch": 0.35313001605136435,
"grad_norm": 2.3806257195854297,
"learning_rate": 8.172417589001275e-06,
"loss": 0.5769,
"step": 660
},
{
"epoch": 0.35580524344569286,
"grad_norm": 2.4627892743606883,
"learning_rate": 8.136188049036817e-06,
"loss": 0.5607,
"step": 665
},
{
"epoch": 0.3584804708400214,
"grad_norm": 2.4036001260251116,
"learning_rate": 8.099684990569717e-06,
"loss": 0.5457,
"step": 670
},
{
"epoch": 0.3611556982343499,
"grad_norm": 2.376086863911639,
"learning_rate": 8.06291159716606e-06,
"loss": 0.5486,
"step": 675
},
{
"epoch": 0.3638309256286784,
"grad_norm": 2.398954153903048,
"learning_rate": 8.025871075968828e-06,
"loss": 0.5493,
"step": 680
},
{
"epoch": 0.36650615302300693,
"grad_norm": 2.455740633973017,
"learning_rate": 7.988566657418202e-06,
"loss": 0.5562,
"step": 685
},
{
"epoch": 0.36918138041733545,
"grad_norm": 2.369562138410344,
"learning_rate": 7.951001594969827e-06,
"loss": 0.5439,
"step": 690
},
{
"epoch": 0.37185660781166396,
"grad_norm": 2.509025529666415,
"learning_rate": 7.91317916481106e-06,
"loss": 0.5605,
"step": 695
},
{
"epoch": 0.37453183520599254,
"grad_norm": 2.8094817864501564,
"learning_rate": 7.875102665575241e-06,
"loss": 0.5425,
"step": 700
},
{
"epoch": 0.37720706260032105,
"grad_norm": 2.3572332281985195,
"learning_rate": 7.83677541805401e-06,
"loss": 0.5378,
"step": 705
},
{
"epoch": 0.37988228999464957,
"grad_norm": 2.4674842117015556,
"learning_rate": 7.798200764907691e-06,
"loss": 0.5495,
"step": 710
},
{
"epoch": 0.3825575173889781,
"grad_norm": 2.3989489571725855,
"learning_rate": 7.759382070373755e-06,
"loss": 0.5485,
"step": 715
},
{
"epoch": 0.3852327447833066,
"grad_norm": 2.674412604036896,
"learning_rate": 7.720322719973433e-06,
"loss": 0.5561,
"step": 720
},
{
"epoch": 0.3879079721776351,
"grad_norm": 2.592614853346977,
"learning_rate": 7.68102612021643e-06,
"loss": 0.5405,
"step": 725
},
{
"epoch": 0.39058319957196364,
"grad_norm": 2.459454917532783,
"learning_rate": 7.641495698303844e-06,
"loss": 0.5411,
"step": 730
},
{
"epoch": 0.39325842696629215,
"grad_norm": 2.4611176697364554,
"learning_rate": 7.60173490182926e-06,
"loss": 0.5406,
"step": 735
},
{
"epoch": 0.39593365436062067,
"grad_norm": 2.586308658244066,
"learning_rate": 7.5617471984780885e-06,
"loss": 0.5346,
"step": 740
},
{
"epoch": 0.3986088817549492,
"grad_norm": 2.5103160686362562,
"learning_rate": 7.521536075725106e-06,
"loss": 0.5425,
"step": 745
},
{
"epoch": 0.4012841091492777,
"grad_norm": 2.4458027425117566,
"learning_rate": 7.481105040530334e-06,
"loss": 0.5374,
"step": 750
},
{
"epoch": 0.4039593365436062,
"grad_norm": 2.4438992819970977,
"learning_rate": 7.440457619033155e-06,
"loss": 0.5327,
"step": 755
},
{
"epoch": 0.40663456393793473,
"grad_norm": 2.587118080093473,
"learning_rate": 7.3995973562448065e-06,
"loss": 0.5236,
"step": 760
},
{
"epoch": 0.40930979133226325,
"grad_norm": 2.387588845819119,
"learning_rate": 7.358527815739192e-06,
"loss": 0.5298,
"step": 765
},
{
"epoch": 0.41198501872659177,
"grad_norm": 2.4664891870732095,
"learning_rate": 7.317252579342096e-06,
"loss": 0.5442,
"step": 770
},
{
"epoch": 0.4146602461209203,
"grad_norm": 2.5397677105414007,
"learning_rate": 7.275775246818802e-06,
"loss": 0.5338,
"step": 775
},
{
"epoch": 0.4173354735152488,
"grad_norm": 2.412761296727546,
"learning_rate": 7.23409943556014e-06,
"loss": 0.5211,
"step": 780
},
{
"epoch": 0.4200107009095773,
"grad_norm": 2.506870676743489,
"learning_rate": 7.192228780266997e-06,
"loss": 0.5335,
"step": 785
},
{
"epoch": 0.42268592830390583,
"grad_norm": 2.405598946485478,
"learning_rate": 7.150166932633328e-06,
"loss": 0.5155,
"step": 790
},
{
"epoch": 0.42536115569823435,
"grad_norm": 2.560782615469258,
"learning_rate": 7.1079175610276775e-06,
"loss": 0.5227,
"step": 795
},
{
"epoch": 0.42803638309256287,
"grad_norm": 2.447786049193456,
"learning_rate": 7.065484350173242e-06,
"loss": 0.5188,
"step": 800
},
{
"epoch": 0.4307116104868914,
"grad_norm": 2.340577179487899,
"learning_rate": 7.022871000826519e-06,
"loss": 0.5073,
"step": 805
},
{
"epoch": 0.4333868378812199,
"grad_norm": 2.317390286964017,
"learning_rate": 6.980081229454545e-06,
"loss": 0.5213,
"step": 810
},
{
"epoch": 0.4360620652755484,
"grad_norm": 2.5055573774312654,
"learning_rate": 6.937118767910771e-06,
"loss": 0.5323,
"step": 815
},
{
"epoch": 0.43873729266987693,
"grad_norm": 2.3422177594851443,
"learning_rate": 6.893987363109595e-06,
"loss": 0.5178,
"step": 820
},
{
"epoch": 0.44141252006420545,
"grad_norm": 2.366251437677303,
"learning_rate": 6.850690776699574e-06,
"loss": 0.5186,
"step": 825
},
{
"epoch": 0.44408774745853397,
"grad_norm": 2.2289370890506484,
"learning_rate": 6.807232784735363e-06,
"loss": 0.4978,
"step": 830
},
{
"epoch": 0.4467629748528625,
"grad_norm": 2.574450646579187,
"learning_rate": 6.763617177348394e-06,
"loss": 0.5134,
"step": 835
},
{
"epoch": 0.449438202247191,
"grad_norm": 2.579012282759833,
"learning_rate": 6.719847758416316e-06,
"loss": 0.5041,
"step": 840
},
{
"epoch": 0.4521134296415195,
"grad_norm": 2.3977388995322504,
"learning_rate": 6.675928345231248e-06,
"loss": 0.5147,
"step": 845
},
{
"epoch": 0.45478865703584803,
"grad_norm": 2.3812185827652805,
"learning_rate": 6.631862768166861e-06,
"loss": 0.4962,
"step": 850
},
{
"epoch": 0.45746388443017655,
"grad_norm": 2.2969030869762266,
"learning_rate": 6.587654870344318e-06,
"loss": 0.4934,
"step": 855
},
{
"epoch": 0.46013911182450506,
"grad_norm": 2.4745323642715307,
"learning_rate": 6.543308507297094e-06,
"loss": 0.5198,
"step": 860
},
{
"epoch": 0.4628143392188336,
"grad_norm": 2.408968353469859,
"learning_rate": 6.498827546634733e-06,
"loss": 0.4983,
"step": 865
},
{
"epoch": 0.4654895666131621,
"grad_norm": 2.3068462867463166,
"learning_rate": 6.454215867705526e-06,
"loss": 0.4865,
"step": 870
},
{
"epoch": 0.4681647940074906,
"grad_norm": 2.4365247368894445,
"learning_rate": 6.409477361258188e-06,
"loss": 0.5018,
"step": 875
},
{
"epoch": 0.47084002140181913,
"grad_norm": 2.4409255239688217,
"learning_rate": 6.364615929102531e-06,
"loss": 0.4955,
"step": 880
},
{
"epoch": 0.47351524879614765,
"grad_norm": 2.447380987219203,
"learning_rate": 6.319635483769164e-06,
"loss": 0.5079,
"step": 885
},
{
"epoch": 0.47619047619047616,
"grad_norm": 2.3520300873004456,
"learning_rate": 6.274539948168279e-06,
"loss": 0.4977,
"step": 890
},
{
"epoch": 0.47886570358480474,
"grad_norm": 2.3483336437686972,
"learning_rate": 6.229333255247511e-06,
"loss": 0.4778,
"step": 895
},
{
"epoch": 0.48154093097913325,
"grad_norm": 2.3642056718240823,
"learning_rate": 6.184019347648939e-06,
"loss": 0.4737,
"step": 900
},
{
"epoch": 0.48421615837346177,
"grad_norm": 2.3120413897093455,
"learning_rate": 6.138602177365218e-06,
"loss": 0.4864,
"step": 905
},
{
"epoch": 0.4868913857677903,
"grad_norm": 2.3693154175371864,
"learning_rate": 6.093085705394934e-06,
"loss": 0.4812,
"step": 910
},
{
"epoch": 0.4895666131621188,
"grad_norm": 2.2963598026052945,
"learning_rate": 6.04747390139713e-06,
"loss": 0.4932,
"step": 915
},
{
"epoch": 0.4922418405564473,
"grad_norm": 2.3384708501252462,
"learning_rate": 6.001770743345108e-06,
"loss": 0.4821,
"step": 920
},
{
"epoch": 0.49491706795077584,
"grad_norm": 2.434655406611595,
"learning_rate": 5.9559802171794955e-06,
"loss": 0.5072,
"step": 925
},
{
"epoch": 0.49759229534510435,
"grad_norm": 2.249845501951125,
"learning_rate": 5.9101063164606165e-06,
"loss": 0.4736,
"step": 930
},
{
"epoch": 0.5002675227394329,
"grad_norm": 2.2279896314937346,
"learning_rate": 5.864153042020191e-06,
"loss": 0.4555,
"step": 935
},
{
"epoch": 0.5029427501337613,
"grad_norm": 2.3663556164493342,
"learning_rate": 5.818124401612416e-06,
"loss": 0.4782,
"step": 940
},
{
"epoch": 0.5056179775280899,
"grad_norm": 2.3362900119271535,
"learning_rate": 5.7720244095644305e-06,
"loss": 0.4856,
"step": 945
},
{
"epoch": 0.5082932049224184,
"grad_norm": 2.3247246267623574,
"learning_rate": 5.725857086426216e-06,
"loss": 0.4819,
"step": 950
},
{
"epoch": 0.5109684323167469,
"grad_norm": 2.3903304769163087,
"learning_rate": 5.679626458619947e-06,
"loss": 0.4968,
"step": 955
},
{
"epoch": 0.5136436597110754,
"grad_norm": 2.399301520169239,
"learning_rate": 5.633336558088823e-06,
"loss": 0.4823,
"step": 960
},
{
"epoch": 0.516318887105404,
"grad_norm": 2.381128904196678,
"learning_rate": 5.586991421945445e-06,
"loss": 0.4713,
"step": 965
},
{
"epoch": 0.5189941144997324,
"grad_norm": 2.254781913274343,
"learning_rate": 5.540595092119709e-06,
"loss": 0.4663,
"step": 970
},
{
"epoch": 0.521669341894061,
"grad_norm": 2.3578976466931967,
"learning_rate": 5.494151615006307e-06,
"loss": 0.464,
"step": 975
},
{
"epoch": 0.5243445692883895,
"grad_norm": 2.320183786320425,
"learning_rate": 5.44766504111181e-06,
"loss": 0.4668,
"step": 980
},
{
"epoch": 0.527019796682718,
"grad_norm": 2.4755011866404617,
"learning_rate": 5.401139424701427e-06,
"loss": 0.4624,
"step": 985
},
{
"epoch": 0.5296950240770465,
"grad_norm": 2.379615469328078,
"learning_rate": 5.354578823445404e-06,
"loss": 0.4868,
"step": 990
},
{
"epoch": 0.5323702514713751,
"grad_norm": 2.559522007767685,
"learning_rate": 5.307987298065145e-06,
"loss": 0.4553,
"step": 995
},
{
"epoch": 0.5350454788657036,
"grad_norm": 2.274771240243394,
"learning_rate": 5.26136891197906e-06,
"loss": 0.463,
"step": 1000
},
{
"epoch": 0.5377207062600321,
"grad_norm": 2.297919270188331,
"learning_rate": 5.214727730948181e-06,
"loss": 0.4766,
"step": 1005
},
{
"epoch": 0.5403959336543607,
"grad_norm": 2.4529152504712264,
"learning_rate": 5.1680678227215705e-06,
"loss": 0.458,
"step": 1010
},
{
"epoch": 0.5430711610486891,
"grad_norm": 2.323742728593664,
"learning_rate": 5.121393256681561e-06,
"loss": 0.4653,
"step": 1015
},
{
"epoch": 0.5457463884430177,
"grad_norm": 2.3396441703224427,
"learning_rate": 5.07470810348884e-06,
"loss": 0.4623,
"step": 1020
},
{
"epoch": 0.5484216158373462,
"grad_norm": 2.373211986289522,
"learning_rate": 5.02801643472745e-06,
"loss": 0.4659,
"step": 1025
},
{
"epoch": 0.5510968432316747,
"grad_norm": 2.3457374838231506,
"learning_rate": 4.98132232254967e-06,
"loss": 0.4579,
"step": 1030
},
{
"epoch": 0.5537720706260032,
"grad_norm": 2.278815006584011,
"learning_rate": 4.934629839320885e-06,
"loss": 0.4652,
"step": 1035
},
{
"epoch": 0.5564472980203318,
"grad_norm": 2.393064658527846,
"learning_rate": 4.88794305726441e-06,
"loss": 0.4473,
"step": 1040
},
{
"epoch": 0.5591225254146602,
"grad_norm": 2.3283757660012827,
"learning_rate": 4.841266048106343e-06,
"loss": 0.4467,
"step": 1045
},
{
"epoch": 0.5617977528089888,
"grad_norm": 2.320156492921696,
"learning_rate": 4.794602882720448e-06,
"loss": 0.4616,
"step": 1050
},
{
"epoch": 0.5644729802033173,
"grad_norm": 2.575616139607137,
"learning_rate": 4.747957630773124e-06,
"loss": 0.4509,
"step": 1055
},
{
"epoch": 0.5671482075976458,
"grad_norm": 2.5123589913879316,
"learning_rate": 4.701334360368473e-06,
"loss": 0.4524,
"step": 1060
},
{
"epoch": 0.5698234349919743,
"grad_norm": 2.271273576393498,
"learning_rate": 4.654737137693508e-06,
"loss": 0.45,
"step": 1065
},
{
"epoch": 0.5724986623863029,
"grad_norm": 2.2852159517598074,
"learning_rate": 4.6081700266635195e-06,
"loss": 0.4348,
"step": 1070
},
{
"epoch": 0.5751738897806313,
"grad_norm": 2.2165454885591735,
"learning_rate": 4.561637088567654e-06,
"loss": 0.4564,
"step": 1075
},
{
"epoch": 0.5778491171749599,
"grad_norm": 2.17131177389315,
"learning_rate": 4.51514238171471e-06,
"loss": 0.4301,
"step": 1080
},
{
"epoch": 0.5805243445692884,
"grad_norm": 2.3335959271604527,
"learning_rate": 4.468689961079195e-06,
"loss": 0.4603,
"step": 1085
},
{
"epoch": 0.5831995719636169,
"grad_norm": 2.3673956355147987,
"learning_rate": 4.4222838779476866e-06,
"loss": 0.447,
"step": 1090
},
{
"epoch": 0.5858747993579454,
"grad_norm": 2.2845586534949587,
"learning_rate": 4.375928179565494e-06,
"loss": 0.4364,
"step": 1095
},
{
"epoch": 0.588550026752274,
"grad_norm": 2.382390080327833,
"learning_rate": 4.329626908783685e-06,
"loss": 0.4476,
"step": 1100
},
{
"epoch": 0.5912252541466024,
"grad_norm": 2.376381672802873,
"learning_rate": 4.2833841037065e-06,
"loss": 0.4327,
"step": 1105
},
{
"epoch": 0.593900481540931,
"grad_norm": 2.19725726906033,
"learning_rate": 4.237203797339169e-06,
"loss": 0.4291,
"step": 1110
},
{
"epoch": 0.5965757089352595,
"grad_norm": 2.4498953683943787,
"learning_rate": 4.191090017236177e-06,
"loss": 0.428,
"step": 1115
},
{
"epoch": 0.599250936329588,
"grad_norm": 2.3326789003086215,
"learning_rate": 4.145046785150013e-06,
"loss": 0.4284,
"step": 1120
},
{
"epoch": 0.6019261637239165,
"grad_norm": 2.2953958459331942,
"learning_rate": 4.09907811668041e-06,
"loss": 0.4396,
"step": 1125
},
{
"epoch": 0.6046013911182451,
"grad_norm": 2.3776367722990197,
"learning_rate": 4.0531880209241356e-06,
"loss": 0.4411,
"step": 1130
},
{
"epoch": 0.6072766185125735,
"grad_norm": 2.3347711258392128,
"learning_rate": 4.0073805001253405e-06,
"loss": 0.4315,
"step": 1135
},
{
"epoch": 0.6099518459069021,
"grad_norm": 2.4383518825210064,
"learning_rate": 3.961659549326512e-06,
"loss": 0.4269,
"step": 1140
},
{
"epoch": 0.6126270733012306,
"grad_norm": 2.4340897214969983,
"learning_rate": 3.916029156020044e-06,
"loss": 0.4309,
"step": 1145
},
{
"epoch": 0.6153023006955591,
"grad_norm": 2.2754274438720747,
"learning_rate": 3.870493299800484e-06,
"loss": 0.4312,
"step": 1150
},
{
"epoch": 0.6179775280898876,
"grad_norm": 2.658137789057608,
"learning_rate": 3.82505595201745e-06,
"loss": 0.4233,
"step": 1155
},
{
"epoch": 0.6206527554842162,
"grad_norm": 2.29001224381586,
"learning_rate": 3.7797210754292766e-06,
"loss": 0.4258,
"step": 1160
},
{
"epoch": 0.6233279828785446,
"grad_norm": 2.4164984348571097,
"learning_rate": 3.7344926238574074e-06,
"loss": 0.422,
"step": 1165
},
{
"epoch": 0.6260032102728732,
"grad_norm": 2.2869156563152746,
"learning_rate": 3.6893745418415692e-06,
"loss": 0.4079,
"step": 1170
},
{
"epoch": 0.6286784376672017,
"grad_norm": 2.298993605302855,
"learning_rate": 3.6443707642957526e-06,
"loss": 0.4181,
"step": 1175
},
{
"epoch": 0.6313536650615302,
"grad_norm": 2.2595999255751167,
"learning_rate": 3.5994852161650386e-06,
"loss": 0.4227,
"step": 1180
},
{
"epoch": 0.6340288924558587,
"grad_norm": 2.4307645420194786,
"learning_rate": 3.5547218120832807e-06,
"loss": 0.4241,
"step": 1185
},
{
"epoch": 0.6367041198501873,
"grad_norm": 2.362571516450198,
"learning_rate": 3.5100844560317028e-06,
"loss": 0.4278,
"step": 1190
},
{
"epoch": 0.6393793472445158,
"grad_norm": 2.3322794126447373,
"learning_rate": 3.465577040998417e-06,
"loss": 0.4218,
"step": 1195
},
{
"epoch": 0.6420545746388443,
"grad_norm": 2.2321766112287618,
"learning_rate": 3.4212034486388972e-06,
"loss": 0.4221,
"step": 1200
},
{
"epoch": 0.6447298020331729,
"grad_norm": 2.0998355946010703,
"learning_rate": 3.376967548937457e-06,
"loss": 0.4215,
"step": 1205
},
{
"epoch": 0.6474050294275013,
"grad_norm": 2.3235714593936527,
"learning_rate": 3.332873199869719e-06,
"loss": 0.4195,
"step": 1210
},
{
"epoch": 0.6500802568218299,
"grad_norm": 2.239188684198451,
"learning_rate": 3.2889242470661553e-06,
"loss": 0.4217,
"step": 1215
},
{
"epoch": 0.6527554842161584,
"grad_norm": 2.278419097290613,
"learning_rate": 3.245124523476699e-06,
"loss": 0.4144,
"step": 1220
},
{
"epoch": 0.6554307116104869,
"grad_norm": 2.117994360937494,
"learning_rate": 3.2014778490364484e-06,
"loss": 0.4075,
"step": 1225
},
{
"epoch": 0.6581059390048154,
"grad_norm": 2.3021078140893065,
"learning_rate": 3.157988030332526e-06,
"loss": 0.4108,
"step": 1230
},
{
"epoch": 0.660781166399144,
"grad_norm": 2.3426641218010147,
"learning_rate": 3.1146588602720884e-06,
"loss": 0.4107,
"step": 1235
},
{
"epoch": 0.6634563937934724,
"grad_norm": 2.187165715147062,
"learning_rate": 3.0714941177515307e-06,
"loss": 0.4045,
"step": 1240
},
{
"epoch": 0.666131621187801,
"grad_norm": 2.309991402360575,
"learning_rate": 3.0284975673269175e-06,
"loss": 0.4103,
"step": 1245
},
{
"epoch": 0.6688068485821295,
"grad_norm": 2.1623322064245287,
"learning_rate": 2.9856729588856615e-06,
"loss": 0.3973,
"step": 1250
},
{
"epoch": 0.671482075976458,
"grad_norm": 2.2478622715565577,
"learning_rate": 2.9430240273194844e-06,
"loss": 0.4077,
"step": 1255
},
{
"epoch": 0.6741573033707865,
"grad_norm": 2.2902445551283517,
"learning_rate": 2.9005544921986774e-06,
"loss": 0.423,
"step": 1260
},
{
"epoch": 0.6768325307651151,
"grad_norm": 2.284169087431414,
"learning_rate": 2.858268057447712e-06,
"loss": 0.4158,
"step": 1265
},
{
"epoch": 0.6795077581594435,
"grad_norm": 2.458436688209604,
"learning_rate": 2.8161684110221987e-06,
"loss": 0.4031,
"step": 1270
},
{
"epoch": 0.6821829855537721,
"grad_norm": 2.3264822511255923,
"learning_rate": 2.7742592245872523e-06,
"loss": 0.4065,
"step": 1275
},
{
"epoch": 0.6848582129481006,
"grad_norm": 2.5513582936923935,
"learning_rate": 2.7325441531972685e-06,
"loss": 0.4022,
"step": 1280
},
{
"epoch": 0.6875334403424291,
"grad_norm": 2.4369769185582357,
"learning_rate": 2.691026834977161e-06,
"loss": 0.387,
"step": 1285
},
{
"epoch": 0.6902086677367576,
"grad_norm": 2.2573622305743717,
"learning_rate": 2.649710890805055e-06,
"loss": 0.387,
"step": 1290
},
{
"epoch": 0.6928838951310862,
"grad_norm": 2.27364788014749,
"learning_rate": 2.6085999239965094e-06,
"loss": 0.3908,
"step": 1295
},
{
"epoch": 0.6955591225254146,
"grad_norm": 2.1641089683573087,
"learning_rate": 2.567697519990249e-06,
"loss": 0.398,
"step": 1300
},
{
"epoch": 0.6982343499197432,
"grad_norm": 2.404524646409722,
"learning_rate": 2.52700724603547e-06,
"loss": 0.4106,
"step": 1305
},
{
"epoch": 0.7009095773140717,
"grad_norm": 2.238361007241196,
"learning_rate": 2.4865326508807274e-06,
"loss": 0.4058,
"step": 1310
},
{
"epoch": 0.7035848047084002,
"grad_norm": 2.183532695311476,
"learning_rate": 2.446277264464431e-06,
"loss": 0.395,
"step": 1315
},
{
"epoch": 0.7062600321027287,
"grad_norm": 2.250874920777629,
"learning_rate": 2.406244597606994e-06,
"loss": 0.3911,
"step": 1320
},
{
"epoch": 0.7089352594970573,
"grad_norm": 2.258464908998249,
"learning_rate": 2.3664381417046362e-06,
"loss": 0.3844,
"step": 1325
},
{
"epoch": 0.7116104868913857,
"grad_norm": 2.1688633389428094,
"learning_rate": 2.3268613684248846e-06,
"loss": 0.385,
"step": 1330
},
{
"epoch": 0.7142857142857143,
"grad_norm": 2.2067691331384722,
"learning_rate": 2.287517729403802e-06,
"loss": 0.3798,
"step": 1335
},
{
"epoch": 0.7169609416800428,
"grad_norm": 2.114368741635283,
"learning_rate": 2.2484106559449527e-06,
"loss": 0.392,
"step": 1340
},
{
"epoch": 0.7196361690743713,
"grad_norm": 2.2394615171402013,
"learning_rate": 2.2095435587201487e-06,
"loss": 0.3911,
"step": 1345
},
{
"epoch": 0.7223113964686998,
"grad_norm": 2.3108268720991143,
"learning_rate": 2.1709198274719908e-06,
"loss": 0.3793,
"step": 1350
},
{
"epoch": 0.7249866238630284,
"grad_norm": 2.1926845295729356,
"learning_rate": 2.1325428307182357e-06,
"loss": 0.3819,
"step": 1355
},
{
"epoch": 0.7276618512573568,
"grad_norm": 2.436496088519088,
"learning_rate": 2.0944159154580225e-06,
"loss": 0.395,
"step": 1360
},
{
"epoch": 0.7303370786516854,
"grad_norm": 2.075451281707529,
"learning_rate": 2.056542406879957e-06,
"loss": 0.3875,
"step": 1365
},
{
"epoch": 0.7330123060460139,
"grad_norm": 2.2280446988692497,
"learning_rate": 2.018925608072118e-06,
"loss": 0.3999,
"step": 1370
},
{
"epoch": 0.7356875334403424,
"grad_norm": 2.1625495094942346,
"learning_rate": 1.981568799733979e-06,
"loss": 0.3834,
"step": 1375
},
{
"epoch": 0.7383627608346709,
"grad_norm": 2.1676852204847243,
"learning_rate": 1.9444752398902874e-06,
"loss": 0.3844,
"step": 1380
},
{
"epoch": 0.7410379882289995,
"grad_norm": 2.1377222237435882,
"learning_rate": 1.907648163606925e-06,
"loss": 0.4002,
"step": 1385
},
{
"epoch": 0.7437132156233279,
"grad_norm": 2.089588817043856,
"learning_rate": 1.871090782708756e-06,
"loss": 0.3736,
"step": 1390
},
{
"epoch": 0.7463884430176565,
"grad_norm": 2.1924733823319627,
"learning_rate": 1.834806285499519e-06,
"loss": 0.3809,
"step": 1395
},
{
"epoch": 0.7490636704119851,
"grad_norm": 2.2869337262048814,
"learning_rate": 1.7987978364837649e-06,
"loss": 0.3845,
"step": 1400
},
{
"epoch": 0.7517388978063135,
"grad_norm": 2.1474805561252706,
"learning_rate": 1.7630685760908623e-06,
"loss": 0.3635,
"step": 1405
},
{
"epoch": 0.7544141252006421,
"grad_norm": 2.1712442490167443,
"learning_rate": 1.727621620401112e-06,
"loss": 0.3732,
"step": 1410
},
{
"epoch": 0.7570893525949706,
"grad_norm": 2.3376722565086587,
"learning_rate": 1.6924600608739843e-06,
"loss": 0.3807,
"step": 1415
},
{
"epoch": 0.7597645799892991,
"grad_norm": 2.0954193272108768,
"learning_rate": 1.6575869640784998e-06,
"loss": 0.3722,
"step": 1420
},
{
"epoch": 0.7624398073836276,
"grad_norm": 2.1621880689007464,
"learning_rate": 1.6230053714257821e-06,
"loss": 0.3801,
"step": 1425
},
{
"epoch": 0.7651150347779562,
"grad_norm": 2.14155400198408,
"learning_rate": 1.588718298903803e-06,
"loss": 0.3877,
"step": 1430
},
{
"epoch": 0.7677902621722846,
"grad_norm": 2.178590274710476,
"learning_rate": 1.554728736814356e-06,
"loss": 0.3883,
"step": 1435
},
{
"epoch": 0.7704654895666132,
"grad_norm": 2.380898516963745,
"learning_rate": 1.5210396495122481e-06,
"loss": 0.3723,
"step": 1440
},
{
"epoch": 0.7731407169609417,
"grad_norm": 2.1623659675475118,
"learning_rate": 1.4876539751467806e-06,
"loss": 0.3712,
"step": 1445
},
{
"epoch": 0.7758159443552702,
"grad_norm": 2.301153404753794,
"learning_rate": 1.45457462540549e-06,
"loss": 0.3849,
"step": 1450
},
{
"epoch": 0.7784911717495987,
"grad_norm": 2.4583510104341775,
"learning_rate": 1.4218044852602176e-06,
"loss": 0.3835,
"step": 1455
},
{
"epoch": 0.7811663991439273,
"grad_norm": 2.3878689243970963,
"learning_rate": 1.3893464127154976e-06,
"loss": 0.3696,
"step": 1460
},
{
"epoch": 0.7838416265382557,
"grad_norm": 2.1363871753550345,
"learning_rate": 1.3572032385592999e-06,
"loss": 0.3758,
"step": 1465
},
{
"epoch": 0.7865168539325843,
"grad_norm": 2.1871068590908758,
"learning_rate": 1.325377766116146e-06,
"loss": 0.3715,
"step": 1470
},
{
"epoch": 0.7891920813269128,
"grad_norm": 2.1101151011483226,
"learning_rate": 1.293872771002625e-06,
"loss": 0.3738,
"step": 1475
},
{
"epoch": 0.7918673087212413,
"grad_norm": 2.2186435189641607,
"learning_rate": 1.2626910008853154e-06,
"loss": 0.3633,
"step": 1480
},
{
"epoch": 0.7945425361155698,
"grad_norm": 2.0058197406035174,
"learning_rate": 1.231835175241155e-06,
"loss": 0.377,
"step": 1485
},
{
"epoch": 0.7972177635098984,
"grad_norm": 2.1238226266787645,
"learning_rate": 1.2013079851202642e-06,
"loss": 0.3757,
"step": 1490
},
{
"epoch": 0.7998929909042268,
"grad_norm": 2.053687958659438,
"learning_rate": 1.1711120929112507e-06,
"loss": 0.3741,
"step": 1495
},
{
"epoch": 0.8025682182985554,
"grad_norm": 2.2784092449790143,
"learning_rate": 1.141250132109009e-06,
"loss": 0.3746,
"step": 1500
},
{
"epoch": 0.8052434456928839,
"grad_norm": 2.3149910219311716,
"learning_rate": 1.1117247070850534e-06,
"loss": 0.3694,
"step": 1505
},
{
"epoch": 0.8079186730872124,
"grad_norm": 1.9494411281687747,
"learning_rate": 1.0825383928603656e-06,
"loss": 0.3729,
"step": 1510
},
{
"epoch": 0.8105939004815409,
"grad_norm": 2.1391776524549986,
"learning_rate": 1.0536937348808341e-06,
"loss": 0.3816,
"step": 1515
},
{
"epoch": 0.8132691278758695,
"grad_norm": 2.158574952643675,
"learning_rate": 1.0251932487952437e-06,
"loss": 0.3637,
"step": 1520
},
{
"epoch": 0.8159443552701979,
"grad_norm": 2.0187091954077383,
"learning_rate": 9.97039420235884e-07,
"loss": 0.3636,
"step": 1525
},
{
"epoch": 0.8186195826645265,
"grad_norm": 2.2338730709728214,
"learning_rate": 9.692347046017647e-07,
"loss": 0.3704,
"step": 1530
},
{
"epoch": 0.821294810058855,
"grad_norm": 2.0154204052322107,
"learning_rate": 9.417815268444719e-07,
"loss": 0.3559,
"step": 1535
},
{
"epoch": 0.8239700374531835,
"grad_norm": 2.2340349325353692,
"learning_rate": 9.146822812566819e-07,
"loss": 0.3694,
"step": 1540
},
{
"epoch": 0.826645264847512,
"grad_norm": 2.117191760005575,
"learning_rate": 8.879393312633405e-07,
"loss": 0.3633,
"step": 1545
},
{
"epoch": 0.8293204922418406,
"grad_norm": 2.343869437714791,
"learning_rate": 8.615550092155478e-07,
"loss": 0.3715,
"step": 1550
},
{
"epoch": 0.831995719636169,
"grad_norm": 2.163827510506345,
"learning_rate": 8.355316161871369e-07,
"loss": 0.3688,
"step": 1555
},
{
"epoch": 0.8346709470304976,
"grad_norm": 2.1077539661425573,
"learning_rate": 8.098714217739928e-07,
"loss": 0.3625,
"step": 1560
},
{
"epoch": 0.8373461744248261,
"grad_norm": 2.3841521555596144,
"learning_rate": 7.845766638961172e-07,
"loss": 0.3701,
"step": 1565
},
{
"epoch": 0.8400214018191546,
"grad_norm": 2.1270662108812197,
"learning_rate": 7.596495486024402e-07,
"loss": 0.3654,
"step": 1570
},
{
"epoch": 0.8426966292134831,
"grad_norm": 1.983391985023499,
"learning_rate": 7.350922498784335e-07,
"loss": 0.3541,
"step": 1575
},
{
"epoch": 0.8453718566078117,
"grad_norm": 2.198475196644775,
"learning_rate": 7.109069094565024e-07,
"loss": 0.3648,
"step": 1580
},
{
"epoch": 0.8480470840021401,
"grad_norm": 2.205841690533306,
"learning_rate": 6.870956366291998e-07,
"loss": 0.3624,
"step": 1585
},
{
"epoch": 0.8507223113964687,
"grad_norm": 2.1794505692130346,
"learning_rate": 6.636605080652686e-07,
"loss": 0.3668,
"step": 1590
},
{
"epoch": 0.8533975387907973,
"grad_norm": 1.9111510249255248,
"learning_rate": 6.406035676285244e-07,
"loss": 0.3544,
"step": 1595
},
{
"epoch": 0.8560727661851257,
"grad_norm": 2.1110504615144516,
"learning_rate": 6.179268261996052e-07,
"loss": 0.3566,
"step": 1600
},
{
"epoch": 0.8587479935794543,
"grad_norm": 2.188040981147882,
"learning_rate": 5.956322615005928e-07,
"loss": 0.3567,
"step": 1605
},
{
"epoch": 0.8614232209737828,
"grad_norm": 2.129123341428062,
"learning_rate": 5.737218179225318e-07,
"loss": 0.3597,
"step": 1610
},
{
"epoch": 0.8640984483681113,
"grad_norm": 2.2661635965704354,
"learning_rate": 5.521974063558477e-07,
"loss": 0.3465,
"step": 1615
},
{
"epoch": 0.8667736757624398,
"grad_norm": 2.0733718404139214,
"learning_rate": 5.310609040236963e-07,
"loss": 0.3623,
"step": 1620
},
{
"epoch": 0.8694489031567684,
"grad_norm": 2.0645801046201924,
"learning_rate": 5.103141543182389e-07,
"loss": 0.3515,
"step": 1625
},
{
"epoch": 0.8721241305510968,
"grad_norm": 2.1264968413170813,
"learning_rate": 4.89958966639878e-07,
"loss": 0.3569,
"step": 1630
},
{
"epoch": 0.8747993579454254,
"grad_norm": 2.2338534353746975,
"learning_rate": 4.6999711623944787e-07,
"loss": 0.372,
"step": 1635
},
{
"epoch": 0.8774745853397539,
"grad_norm": 2.0685939146034134,
"learning_rate": 4.504303440633928e-07,
"loss": 0.3548,
"step": 1640
},
{
"epoch": 0.8801498127340824,
"grad_norm": 2.0566412860493464,
"learning_rate": 4.3126035660193076e-07,
"loss": 0.3455,
"step": 1645
},
{
"epoch": 0.8828250401284109,
"grad_norm": 2.06564689656007,
"learning_rate": 4.124888257402243e-07,
"loss": 0.3601,
"step": 1650
},
{
"epoch": 0.8855002675227395,
"grad_norm": 2.0752725981286146,
"learning_rate": 3.9411738861256934e-07,
"loss": 0.3452,
"step": 1655
},
{
"epoch": 0.8881754949170679,
"grad_norm": 2.1007592521360126,
"learning_rate": 3.7614764745961377e-07,
"loss": 0.3526,
"step": 1660
},
{
"epoch": 0.8908507223113965,
"grad_norm": 2.181576068488826,
"learning_rate": 3.585811694886232e-07,
"loss": 0.3541,
"step": 1665
},
{
"epoch": 0.893525949705725,
"grad_norm": 2.114550430452272,
"learning_rate": 3.4141948673679593e-07,
"loss": 0.3518,
"step": 1670
},
{
"epoch": 0.8962011771000535,
"grad_norm": 2.231068650487946,
"learning_rate": 3.2466409593764734e-07,
"loss": 0.3616,
"step": 1675
},
{
"epoch": 0.898876404494382,
"grad_norm": 2.12861720562827,
"learning_rate": 3.083164583904802e-07,
"loss": 0.3557,
"step": 1680
},
{
"epoch": 0.9015516318887106,
"grad_norm": 2.169404584291598,
"learning_rate": 2.923779998329318e-07,
"loss": 0.3468,
"step": 1685
},
{
"epoch": 0.904226859283039,
"grad_norm": 2.2665912627763065,
"learning_rate": 2.76850110316636e-07,
"loss": 0.3546,
"step": 1690
},
{
"epoch": 0.9069020866773676,
"grad_norm": 2.1960373464962726,
"learning_rate": 2.617341440859883e-07,
"loss": 0.3462,
"step": 1695
},
{
"epoch": 0.9095773140716961,
"grad_norm": 2.0809316367006665,
"learning_rate": 2.470314194600376e-07,
"loss": 0.3649,
"step": 1700
},
{
"epoch": 0.9122525414660246,
"grad_norm": 2.0224759056792987,
"learning_rate": 2.3274321871751436e-07,
"loss": 0.354,
"step": 1705
},
{
"epoch": 0.9149277688603531,
"grad_norm": 2.1624428592061147,
"learning_rate": 2.1887078798499272e-07,
"loss": 0.3574,
"step": 1710
},
{
"epoch": 0.9176029962546817,
"grad_norm": 2.0498930768575185,
"learning_rate": 2.0541533712821527e-07,
"loss": 0.3529,
"step": 1715
},
{
"epoch": 0.9202782236490101,
"grad_norm": 2.0452925202457966,
"learning_rate": 1.923780396465741e-07,
"loss": 0.3595,
"step": 1720
},
{
"epoch": 0.9229534510433387,
"grad_norm": 2.1054217236205766,
"learning_rate": 1.7976003257076823e-07,
"loss": 0.3625,
"step": 1725
},
{
"epoch": 0.9256286784376672,
"grad_norm": 2.1836475493288745,
"learning_rate": 1.6756241636363413e-07,
"loss": 0.3538,
"step": 1730
},
{
"epoch": 0.9283039058319957,
"grad_norm": 2.155159547527028,
"learning_rate": 1.557862548241762e-07,
"loss": 0.3488,
"step": 1735
},
{
"epoch": 0.9309791332263242,
"grad_norm": 2.029074950338154,
"learning_rate": 1.4443257499478447e-07,
"loss": 0.346,
"step": 1740
},
{
"epoch": 0.9336543606206528,
"grad_norm": 2.1486539205924773,
"learning_rate": 1.3350236707166508e-07,
"loss": 0.3535,
"step": 1745
},
{
"epoch": 0.9363295880149812,
"grad_norm": 2.2476777610471608,
"learning_rate": 1.229965843184805e-07,
"loss": 0.3508,
"step": 1750
},
{
"epoch": 0.9390048154093098,
"grad_norm": 2.220267151551913,
"learning_rate": 1.1291614298321097e-07,
"loss": 0.3443,
"step": 1755
},
{
"epoch": 0.9416800428036383,
"grad_norm": 2.216803554521132,
"learning_rate": 1.0326192221824738e-07,
"loss": 0.353,
"step": 1760
},
{
"epoch": 0.9443552701979668,
"grad_norm": 2.1635962319508,
"learning_rate": 9.403476400371425e-08,
"loss": 0.3465,
"step": 1765
},
{
"epoch": 0.9470304975922953,
"grad_norm": 2.0907412381152732,
"learning_rate": 8.523547307404179e-08,
"loss": 0.3511,
"step": 1770
},
{
"epoch": 0.9497057249866239,
"grad_norm": 2.0796481307740096,
"learning_rate": 7.686481684777758e-08,
"loss": 0.3411,
"step": 1775
},
{
"epoch": 0.9523809523809523,
"grad_norm": 1.994482220364353,
"learning_rate": 6.89235253606596e-08,
"loss": 0.3445,
"step": 1780
},
{
"epoch": 0.9550561797752809,
"grad_norm": 2.0050959465920726,
"learning_rate": 6.141229120194714e-08,
"loss": 0.3637,
"step": 1785
},
{
"epoch": 0.9577314071696095,
"grad_norm": 2.07485414671568,
"learning_rate": 5.4331769454016306e-08,
"loss": 0.343,
"step": 1790
},
{
"epoch": 0.9604066345639379,
"grad_norm": 2.1043644840923035,
"learning_rate": 4.76825776352291e-08,
"loss": 0.3544,
"step": 1795
},
{
"epoch": 0.9630818619582665,
"grad_norm": 2.022384726444111,
"learning_rate": 4.1465295646076484e-08,
"loss": 0.3484,
"step": 1800
},
{
"epoch": 0.965757089352595,
"grad_norm": 2.0189004481365718,
"learning_rate": 3.568046571860384e-08,
"loss": 0.3418,
"step": 1805
},
{
"epoch": 0.9684323167469235,
"grad_norm": 2.3234121649969235,
"learning_rate": 3.0328592369120443e-08,
"loss": 0.3419,
"step": 1810
},
{
"epoch": 0.971107544141252,
"grad_norm": 2.0669579682456085,
"learning_rate": 2.541014235419914e-08,
"loss": 0.3496,
"step": 1815
},
{
"epoch": 0.9737827715355806,
"grad_norm": 2.1554111970066914,
"learning_rate": 2.0925544629967763e-08,
"loss": 0.3478,
"step": 1820
},
{
"epoch": 0.976457998929909,
"grad_norm": 1.9952413095700356,
"learning_rate": 1.6875190314700197e-08,
"loss": 0.3493,
"step": 1825
},
{
"epoch": 0.9791332263242376,
"grad_norm": 2.0255992605548143,
"learning_rate": 1.3259432654703641e-08,
"loss": 0.3432,
"step": 1830
},
{
"epoch": 0.9818084537185661,
"grad_norm": 2.1257709432408567,
"learning_rate": 1.0078586993511052e-08,
"loss": 0.3377,
"step": 1835
},
{
"epoch": 0.9844836811128946,
"grad_norm": 2.11825113376405,
"learning_rate": 7.332930744380906e-09,
"loss": 0.3608,
"step": 1840
},
{
"epoch": 0.9871589085072231,
"grad_norm": 2.134378305287722,
"learning_rate": 5.0227033660987804e-09,
"loss": 0.3479,
"step": 1845
},
{
"epoch": 0.9898341359015517,
"grad_norm": 1.9561125297371678,
"learning_rate": 3.1481063420985e-09,
"loss": 0.3457,
"step": 1850
},
{
"epoch": 0.9925093632958801,
"grad_norm": 1.9864892600852198,
"learning_rate": 1.7093031628850899e-09,
"loss": 0.3396,
"step": 1855
},
{
"epoch": 0.9951845906902087,
"grad_norm": 2.1474595960099516,
"learning_rate": 7.064193117806151e-10,
"loss": 0.3497,
"step": 1860
},
{
"epoch": 0.9978598180845372,
"grad_norm": 2.0927622927662566,
"learning_rate": 1.3954225397516673e-10,
"loss": 0.3529,
"step": 1865
},
{
"epoch": 1.0,
"eval_loss": 0.28742796182632446,
"eval_runtime": 1.1914,
"eval_samples_per_second": 1.679,
"eval_steps_per_second": 0.839,
"step": 1869
},
{
"epoch": 1.0,
"step": 1869,
"total_flos": 195665288232960.0,
"train_loss": 0.5158078530180448,
"train_runtime": 18877.6023,
"train_samples_per_second": 1.584,
"train_steps_per_second": 0.099
}
],
"logging_steps": 5,
"max_steps": 1869,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 195665288232960.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}