{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 3189,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 20.48359489440918,
"learning_rate": 1.8796992481203008e-07,
"loss": 0.6209,
"step": 10
},
{
"epoch": 0.02,
"grad_norm": 5.289457321166992,
"learning_rate": 3.7593984962406015e-07,
"loss": 0.2829,
"step": 20
},
{
"epoch": 0.03,
"grad_norm": 0.8980396389961243,
"learning_rate": 5.639097744360903e-07,
"loss": 0.0863,
"step": 30
},
{
"epoch": 0.04,
"grad_norm": 0.25141441822052,
"learning_rate": 7.518796992481203e-07,
"loss": 0.0941,
"step": 40
},
{
"epoch": 0.05,
"grad_norm": 0.20232008397579193,
"learning_rate": 9.398496240601504e-07,
"loss": 0.0735,
"step": 50
},
{
"epoch": 0.06,
"grad_norm": 1.138559341430664,
"learning_rate": 1.1278195488721805e-06,
"loss": 0.0802,
"step": 60
},
{
"epoch": 0.07,
"grad_norm": 0.6884934306144714,
"learning_rate": 1.3157894736842106e-06,
"loss": 0.0741,
"step": 70
},
{
"epoch": 0.08,
"grad_norm": 1.3850356340408325,
"learning_rate": 1.5037593984962406e-06,
"loss": 0.0631,
"step": 80
},
{
"epoch": 0.08,
"grad_norm": 0.5666308403015137,
"learning_rate": 1.6917293233082707e-06,
"loss": 0.06,
"step": 90
},
{
"epoch": 0.09,
"grad_norm": 0.5473531484603882,
"learning_rate": 1.8796992481203007e-06,
"loss": 0.0475,
"step": 100
},
{
"epoch": 0.1,
"grad_norm": 0.7816307544708252,
"learning_rate": 2.067669172932331e-06,
"loss": 0.0582,
"step": 110
},
{
"epoch": 0.11,
"grad_norm": 0.9976327419281006,
"learning_rate": 2.255639097744361e-06,
"loss": 0.0439,
"step": 120
},
{
"epoch": 0.12,
"grad_norm": 1.050405740737915,
"learning_rate": 2.443609022556391e-06,
"loss": 0.0439,
"step": 130
},
{
"epoch": 0.13,
"grad_norm": 0.8269789814949036,
"learning_rate": 2.631578947368421e-06,
"loss": 0.059,
"step": 140
},
{
"epoch": 0.14,
"grad_norm": 0.14458781480789185,
"learning_rate": 2.8195488721804516e-06,
"loss": 0.0366,
"step": 150
},
{
"epoch": 0.15,
"grad_norm": 1.4295405149459839,
"learning_rate": 3.007518796992481e-06,
"loss": 0.0602,
"step": 160
},
{
"epoch": 0.16,
"grad_norm": 1.3261005878448486,
"learning_rate": 3.1954887218045117e-06,
"loss": 0.0449,
"step": 170
},
{
"epoch": 0.17,
"grad_norm": 2.0573761463165283,
"learning_rate": 3.3834586466165413e-06,
"loss": 0.0564,
"step": 180
},
{
"epoch": 0.18,
"grad_norm": 0.9643514156341553,
"learning_rate": 3.5714285714285718e-06,
"loss": 0.0559,
"step": 190
},
{
"epoch": 0.19,
"grad_norm": 0.8114656805992126,
"learning_rate": 3.7593984962406014e-06,
"loss": 0.0402,
"step": 200
},
{
"epoch": 0.2,
"grad_norm": 0.7962890267372131,
"learning_rate": 3.947368421052632e-06,
"loss": 0.0486,
"step": 210
},
{
"epoch": 0.21,
"grad_norm": 1.7510570287704468,
"learning_rate": 4.135338345864662e-06,
"loss": 0.0444,
"step": 220
},
{
"epoch": 0.22,
"grad_norm": 0.9593592882156372,
"learning_rate": 4.323308270676692e-06,
"loss": 0.0374,
"step": 230
},
{
"epoch": 0.23,
"grad_norm": 0.5025224089622498,
"learning_rate": 4.511278195488722e-06,
"loss": 0.0337,
"step": 240
},
{
"epoch": 0.24,
"grad_norm": 1.6147772073745728,
"learning_rate": 4.6992481203007525e-06,
"loss": 0.035,
"step": 250
},
{
"epoch": 0.24,
"grad_norm": 1.0601171255111694,
"learning_rate": 4.887218045112782e-06,
"loss": 0.0445,
"step": 260
},
{
"epoch": 0.25,
"grad_norm": 1.0513237714767456,
"learning_rate": 5.075187969924813e-06,
"loss": 0.0495,
"step": 270
},
{
"epoch": 0.26,
"grad_norm": 1.3772203922271729,
"learning_rate": 5.263157894736842e-06,
"loss": 0.061,
"step": 280
},
{
"epoch": 0.27,
"grad_norm": 0.48920339345932007,
"learning_rate": 5.451127819548873e-06,
"loss": 0.0492,
"step": 290
},
{
"epoch": 0.28,
"grad_norm": 1.1981807947158813,
"learning_rate": 5.639097744360903e-06,
"loss": 0.0432,
"step": 300
},
{
"epoch": 0.29,
"grad_norm": 0.28824716806411743,
"learning_rate": 5.827067669172934e-06,
"loss": 0.0412,
"step": 310
},
{
"epoch": 0.3,
"grad_norm": 0.5675559639930725,
"learning_rate": 6.015037593984962e-06,
"loss": 0.0419,
"step": 320
},
{
"epoch": 0.31,
"grad_norm": 0.18200090527534485,
"learning_rate": 6.203007518796993e-06,
"loss": 0.0278,
"step": 330
},
{
"epoch": 0.32,
"grad_norm": 1.1105334758758545,
"learning_rate": 6.390977443609023e-06,
"loss": 0.0454,
"step": 340
},
{
"epoch": 0.33,
"grad_norm": 0.6391922235488892,
"learning_rate": 6.578947368421054e-06,
"loss": 0.0343,
"step": 350
},
{
"epoch": 0.34,
"grad_norm": 0.5360444784164429,
"learning_rate": 6.766917293233083e-06,
"loss": 0.036,
"step": 360
},
{
"epoch": 0.35,
"grad_norm": 0.8997634053230286,
"learning_rate": 6.954887218045113e-06,
"loss": 0.033,
"step": 370
},
{
"epoch": 0.36,
"grad_norm": 0.6569644808769226,
"learning_rate": 7.1428571428571436e-06,
"loss": 0.0349,
"step": 380
},
{
"epoch": 0.37,
"grad_norm": 1.5074034929275513,
"learning_rate": 7.330827067669174e-06,
"loss": 0.0358,
"step": 390
},
{
"epoch": 0.38,
"grad_norm": 0.5094283819198608,
"learning_rate": 7.518796992481203e-06,
"loss": 0.0331,
"step": 400
},
{
"epoch": 0.39,
"grad_norm": 0.9734311103820801,
"learning_rate": 7.706766917293233e-06,
"loss": 0.0507,
"step": 410
},
{
"epoch": 0.4,
"grad_norm": 0.4237803518772125,
"learning_rate": 7.894736842105265e-06,
"loss": 0.041,
"step": 420
},
{
"epoch": 0.4,
"grad_norm": 0.514431893825531,
"learning_rate": 8.082706766917294e-06,
"loss": 0.0476,
"step": 430
},
{
"epoch": 0.41,
"grad_norm": 1.05610990524292,
"learning_rate": 8.270676691729324e-06,
"loss": 0.0445,
"step": 440
},
{
"epoch": 0.42,
"grad_norm": 0.6562148332595825,
"learning_rate": 8.458646616541353e-06,
"loss": 0.0771,
"step": 450
},
{
"epoch": 0.43,
"grad_norm": 0.2789516746997833,
"learning_rate": 8.646616541353385e-06,
"loss": 0.0426,
"step": 460
},
{
"epoch": 0.44,
"grad_norm": 0.25616365671157837,
"learning_rate": 8.834586466165414e-06,
"loss": 0.0177,
"step": 470
},
{
"epoch": 0.45,
"grad_norm": 1.472976565361023,
"learning_rate": 9.022556390977444e-06,
"loss": 0.0344,
"step": 480
},
{
"epoch": 0.46,
"grad_norm": 0.6178507804870605,
"learning_rate": 9.210526315789474e-06,
"loss": 0.0534,
"step": 490
},
{
"epoch": 0.47,
"grad_norm": 1.1991667747497559,
"learning_rate": 9.398496240601505e-06,
"loss": 0.0338,
"step": 500
},
{
"epoch": 0.48,
"grad_norm": 0.4456295073032379,
"learning_rate": 9.586466165413535e-06,
"loss": 0.0322,
"step": 510
},
{
"epoch": 0.49,
"grad_norm": 0.6090606451034546,
"learning_rate": 9.774436090225564e-06,
"loss": 0.047,
"step": 520
},
{
"epoch": 0.5,
"grad_norm": 0.4637684226036072,
"learning_rate": 9.962406015037594e-06,
"loss": 0.0479,
"step": 530
},
{
"epoch": 0.51,
"grad_norm": 0.7359185218811035,
"learning_rate": 9.983274095755803e-06,
"loss": 0.0365,
"step": 540
},
{
"epoch": 0.52,
"grad_norm": 0.5810590386390686,
"learning_rate": 9.962366715450555e-06,
"loss": 0.0276,
"step": 550
},
{
"epoch": 0.53,
"grad_norm": 0.31649720668792725,
"learning_rate": 9.941459335145307e-06,
"loss": 0.0327,
"step": 560
},
{
"epoch": 0.54,
"grad_norm": 1.11226224899292,
"learning_rate": 9.92055195484006e-06,
"loss": 0.0361,
"step": 570
},
{
"epoch": 0.55,
"grad_norm": 0.257317453622818,
"learning_rate": 9.89964457453481e-06,
"loss": 0.035,
"step": 580
},
{
"epoch": 0.56,
"grad_norm": 0.9321485757827759,
"learning_rate": 9.878737194229565e-06,
"loss": 0.0491,
"step": 590
},
{
"epoch": 0.56,
"grad_norm": 0.5542380809783936,
"learning_rate": 9.857829813924315e-06,
"loss": 0.0242,
"step": 600
},
{
"epoch": 0.57,
"grad_norm": 0.8897249698638916,
"learning_rate": 9.836922433619069e-06,
"loss": 0.042,
"step": 610
},
{
"epoch": 0.58,
"grad_norm": 0.3682314157485962,
"learning_rate": 9.81601505331382e-06,
"loss": 0.024,
"step": 620
},
{
"epoch": 0.59,
"grad_norm": 1.0642801523208618,
"learning_rate": 9.795107673008573e-06,
"loss": 0.0402,
"step": 630
},
{
"epoch": 0.6,
"grad_norm": 0.27436748147010803,
"learning_rate": 9.774200292703325e-06,
"loss": 0.0318,
"step": 640
},
{
"epoch": 0.61,
"grad_norm": 0.13485893607139587,
"learning_rate": 9.753292912398077e-06,
"loss": 0.0265,
"step": 650
},
{
"epoch": 0.62,
"grad_norm": 1.0160752534866333,
"learning_rate": 9.73238553209283e-06,
"loss": 0.0257,
"step": 660
},
{
"epoch": 0.63,
"grad_norm": 0.2634833753108978,
"learning_rate": 9.711478151787582e-06,
"loss": 0.0358,
"step": 670
},
{
"epoch": 0.64,
"grad_norm": 0.27712008357048035,
"learning_rate": 9.690570771482334e-06,
"loss": 0.035,
"step": 680
},
{
"epoch": 0.65,
"grad_norm": 0.4950433075428009,
"learning_rate": 9.669663391177086e-06,
"loss": 0.0333,
"step": 690
},
{
"epoch": 0.66,
"grad_norm": 0.3358776867389679,
"learning_rate": 9.648756010871838e-06,
"loss": 0.0395,
"step": 700
},
{
"epoch": 0.67,
"grad_norm": 0.2755337655544281,
"learning_rate": 9.62784863056659e-06,
"loss": 0.0332,
"step": 710
},
{
"epoch": 0.68,
"grad_norm": 0.378814697265625,
"learning_rate": 9.606941250261344e-06,
"loss": 0.0386,
"step": 720
},
{
"epoch": 0.69,
"grad_norm": 0.40000224113464355,
"learning_rate": 9.586033869956095e-06,
"loss": 0.0336,
"step": 730
},
{
"epoch": 0.7,
"grad_norm": 0.19049006700515747,
"learning_rate": 9.565126489650848e-06,
"loss": 0.0313,
"step": 740
},
{
"epoch": 0.71,
"grad_norm": 0.11645769327878952,
"learning_rate": 9.544219109345599e-06,
"loss": 0.0278,
"step": 750
},
{
"epoch": 0.71,
"grad_norm": 0.1895197182893753,
"learning_rate": 9.523311729040352e-06,
"loss": 0.0272,
"step": 760
},
{
"epoch": 0.72,
"grad_norm": 0.22027094662189484,
"learning_rate": 9.502404348735104e-06,
"loss": 0.0337,
"step": 770
},
{
"epoch": 0.73,
"grad_norm": 0.25732624530792236,
"learning_rate": 9.481496968429856e-06,
"loss": 0.0358,
"step": 780
},
{
"epoch": 0.74,
"grad_norm": 0.4771791696548462,
"learning_rate": 9.460589588124608e-06,
"loss": 0.0324,
"step": 790
},
{
"epoch": 0.75,
"grad_norm": 0.18475191295146942,
"learning_rate": 9.43968220781936e-06,
"loss": 0.0348,
"step": 800
},
{
"epoch": 0.76,
"grad_norm": 0.36971214413642883,
"learning_rate": 9.418774827514114e-06,
"loss": 0.0366,
"step": 810
},
{
"epoch": 0.77,
"grad_norm": 0.6465145945549011,
"learning_rate": 9.397867447208866e-06,
"loss": 0.0304,
"step": 820
},
{
"epoch": 0.78,
"grad_norm": 1.547255516052246,
"learning_rate": 9.376960066903618e-06,
"loss": 0.0221,
"step": 830
},
{
"epoch": 0.79,
"grad_norm": 0.44453418254852295,
"learning_rate": 9.35605268659837e-06,
"loss": 0.0399,
"step": 840
},
{
"epoch": 0.8,
"grad_norm": 0.1706964075565338,
"learning_rate": 9.335145306293122e-06,
"loss": 0.0337,
"step": 850
},
{
"epoch": 0.81,
"grad_norm": 0.5651724934577942,
"learning_rate": 9.314237925987874e-06,
"loss": 0.0412,
"step": 860
},
{
"epoch": 0.82,
"grad_norm": 0.3307182490825653,
"learning_rate": 9.293330545682628e-06,
"loss": 0.0276,
"step": 870
},
{
"epoch": 0.83,
"grad_norm": 0.6166930794715881,
"learning_rate": 9.272423165377378e-06,
"loss": 0.0257,
"step": 880
},
{
"epoch": 0.84,
"grad_norm": 0.5756942629814148,
"learning_rate": 9.251515785072132e-06,
"loss": 0.036,
"step": 890
},
{
"epoch": 0.85,
"grad_norm": 0.1616719663143158,
"learning_rate": 9.230608404766884e-06,
"loss": 0.0157,
"step": 900
},
{
"epoch": 0.86,
"grad_norm": 0.6081432104110718,
"learning_rate": 9.209701024461636e-06,
"loss": 0.0452,
"step": 910
},
{
"epoch": 0.87,
"grad_norm": 0.9769769310951233,
"learning_rate": 9.188793644156388e-06,
"loss": 0.0305,
"step": 920
},
{
"epoch": 0.87,
"grad_norm": 0.5156053900718689,
"learning_rate": 9.16788626385114e-06,
"loss": 0.0344,
"step": 930
},
{
"epoch": 0.88,
"grad_norm": 0.30783510208129883,
"learning_rate": 9.146978883545892e-06,
"loss": 0.0193,
"step": 940
},
{
"epoch": 0.89,
"grad_norm": 0.44905951619148254,
"learning_rate": 9.126071503240644e-06,
"loss": 0.034,
"step": 950
},
{
"epoch": 0.9,
"grad_norm": 0.3008134067058563,
"learning_rate": 9.105164122935398e-06,
"loss": 0.0272,
"step": 960
},
{
"epoch": 0.91,
"grad_norm": 0.44791507720947266,
"learning_rate": 9.084256742630148e-06,
"loss": 0.0376,
"step": 970
},
{
"epoch": 0.92,
"grad_norm": 0.7329548001289368,
"learning_rate": 9.063349362324902e-06,
"loss": 0.0424,
"step": 980
},
{
"epoch": 0.93,
"grad_norm": 0.16488119959831238,
"learning_rate": 9.042441982019654e-06,
"loss": 0.0301,
"step": 990
},
{
"epoch": 0.94,
"grad_norm": 0.4265969395637512,
"learning_rate": 9.021534601714406e-06,
"loss": 0.022,
"step": 1000
},
{
"epoch": 0.95,
"grad_norm": 0.2555549740791321,
"learning_rate": 9.000627221409158e-06,
"loss": 0.0319,
"step": 1010
},
{
"epoch": 0.96,
"grad_norm": 0.7876251339912415,
"learning_rate": 8.97971984110391e-06,
"loss": 0.0409,
"step": 1020
},
{
"epoch": 0.97,
"grad_norm": 0.32860153913497925,
"learning_rate": 8.958812460798662e-06,
"loss": 0.0316,
"step": 1030
},
{
"epoch": 0.98,
"grad_norm": 0.36898553371429443,
"learning_rate": 8.937905080493416e-06,
"loss": 0.0325,
"step": 1040
},
{
"epoch": 0.99,
"grad_norm": 0.13984709978103638,
"learning_rate": 8.916997700188168e-06,
"loss": 0.0225,
"step": 1050
},
{
"epoch": 1.0,
"grad_norm": 0.09932324290275574,
"learning_rate": 8.89609031988292e-06,
"loss": 0.0175,
"step": 1060
},
{
"epoch": 1.01,
"grad_norm": 0.16241900622844696,
"learning_rate": 8.875182939577672e-06,
"loss": 0.0222,
"step": 1070
},
{
"epoch": 1.02,
"grad_norm": 0.14714168012142181,
"learning_rate": 8.854275559272424e-06,
"loss": 0.0207,
"step": 1080
},
{
"epoch": 1.03,
"grad_norm": 0.24781319499015808,
"learning_rate": 8.833368178967177e-06,
"loss": 0.0172,
"step": 1090
},
{
"epoch": 1.03,
"grad_norm": 0.3309984803199768,
"learning_rate": 8.812460798661928e-06,
"loss": 0.0377,
"step": 1100
},
{
"epoch": 1.04,
"grad_norm": 0.35171088576316833,
"learning_rate": 8.791553418356681e-06,
"loss": 0.0406,
"step": 1110
},
{
"epoch": 1.05,
"grad_norm": 0.3367606997489929,
"learning_rate": 8.770646038051432e-06,
"loss": 0.032,
"step": 1120
},
{
"epoch": 1.06,
"grad_norm": 0.13503430783748627,
"learning_rate": 8.749738657746185e-06,
"loss": 0.0299,
"step": 1130
},
{
"epoch": 1.07,
"grad_norm": 0.3619076907634735,
"learning_rate": 8.728831277440937e-06,
"loss": 0.0319,
"step": 1140
},
{
"epoch": 1.08,
"grad_norm": 0.40823495388031006,
"learning_rate": 8.70792389713569e-06,
"loss": 0.0249,
"step": 1150
},
{
"epoch": 1.09,
"grad_norm": 0.7125621438026428,
"learning_rate": 8.687016516830441e-06,
"loss": 0.0235,
"step": 1160
},
{
"epoch": 1.1,
"grad_norm": 0.12342959642410278,
"learning_rate": 8.666109136525193e-06,
"loss": 0.0288,
"step": 1170
},
{
"epoch": 1.11,
"grad_norm": 0.232752725481987,
"learning_rate": 8.645201756219947e-06,
"loss": 0.0354,
"step": 1180
},
{
"epoch": 1.12,
"grad_norm": 0.1255234330892563,
"learning_rate": 8.6242943759147e-06,
"loss": 0.0115,
"step": 1190
},
{
"epoch": 1.13,
"grad_norm": 0.26488494873046875,
"learning_rate": 8.603386995609451e-06,
"loss": 0.0247,
"step": 1200
},
{
"epoch": 1.14,
"grad_norm": 0.22945912182331085,
"learning_rate": 8.582479615304203e-06,
"loss": 0.0349,
"step": 1210
},
{
"epoch": 1.15,
"grad_norm": 0.3374781906604767,
"learning_rate": 8.561572234998955e-06,
"loss": 0.0303,
"step": 1220
},
{
"epoch": 1.16,
"grad_norm": 0.38748908042907715,
"learning_rate": 8.540664854693707e-06,
"loss": 0.0252,
"step": 1230
},
{
"epoch": 1.17,
"grad_norm": 0.2272762805223465,
"learning_rate": 8.519757474388461e-06,
"loss": 0.0325,
"step": 1240
},
{
"epoch": 1.18,
"grad_norm": 0.4768443703651428,
"learning_rate": 8.498850094083211e-06,
"loss": 0.0269,
"step": 1250
},
{
"epoch": 1.19,
"grad_norm": 0.32005059719085693,
"learning_rate": 8.477942713777965e-06,
"loss": 0.024,
"step": 1260
},
{
"epoch": 1.19,
"grad_norm": 0.2935084104537964,
"learning_rate": 8.457035333472717e-06,
"loss": 0.028,
"step": 1270
},
{
"epoch": 1.2,
"grad_norm": 0.17606884241104126,
"learning_rate": 8.436127953167469e-06,
"loss": 0.0314,
"step": 1280
},
{
"epoch": 1.21,
"grad_norm": 0.11081908643245697,
"learning_rate": 8.415220572862221e-06,
"loss": 0.0142,
"step": 1290
},
{
"epoch": 1.22,
"grad_norm": 0.06663521379232407,
"learning_rate": 8.394313192556973e-06,
"loss": 0.0217,
"step": 1300
},
{
"epoch": 1.23,
"grad_norm": 0.6221704483032227,
"learning_rate": 8.373405812251725e-06,
"loss": 0.0308,
"step": 1310
},
{
"epoch": 1.24,
"grad_norm": 0.13281384110450745,
"learning_rate": 8.352498431946477e-06,
"loss": 0.0295,
"step": 1320
},
{
"epoch": 1.25,
"grad_norm": 0.21760965883731842,
"learning_rate": 8.331591051641231e-06,
"loss": 0.0243,
"step": 1330
},
{
"epoch": 1.26,
"grad_norm": 0.29121580719947815,
"learning_rate": 8.310683671335981e-06,
"loss": 0.0207,
"step": 1340
},
{
"epoch": 1.27,
"grad_norm": 0.04138851910829544,
"learning_rate": 8.289776291030735e-06,
"loss": 0.03,
"step": 1350
},
{
"epoch": 1.28,
"grad_norm": 0.06128918379545212,
"learning_rate": 8.268868910725487e-06,
"loss": 0.0264,
"step": 1360
},
{
"epoch": 1.29,
"grad_norm": 0.3501634895801544,
"learning_rate": 8.247961530420239e-06,
"loss": 0.0193,
"step": 1370
},
{
"epoch": 1.3,
"grad_norm": 0.17542318999767303,
"learning_rate": 8.227054150114991e-06,
"loss": 0.0144,
"step": 1380
},
{
"epoch": 1.31,
"grad_norm": 0.10308554768562317,
"learning_rate": 8.206146769809743e-06,
"loss": 0.0543,
"step": 1390
},
{
"epoch": 1.32,
"grad_norm": 0.3240692615509033,
"learning_rate": 8.185239389504495e-06,
"loss": 0.0297,
"step": 1400
},
{
"epoch": 1.33,
"grad_norm": 0.09771730750799179,
"learning_rate": 8.164332009199249e-06,
"loss": 0.0334,
"step": 1410
},
{
"epoch": 1.34,
"grad_norm": 0.5148730278015137,
"learning_rate": 8.143424628894e-06,
"loss": 0.0209,
"step": 1420
},
{
"epoch": 1.35,
"grad_norm": 0.546535313129425,
"learning_rate": 8.122517248588753e-06,
"loss": 0.021,
"step": 1430
},
{
"epoch": 1.35,
"grad_norm": 0.7162196636199951,
"learning_rate": 8.101609868283505e-06,
"loss": 0.0218,
"step": 1440
},
{
"epoch": 1.36,
"grad_norm": 0.11191996932029724,
"learning_rate": 8.080702487978257e-06,
"loss": 0.0291,
"step": 1450
},
{
"epoch": 1.37,
"grad_norm": 0.04737411066889763,
"learning_rate": 8.05979510767301e-06,
"loss": 0.0254,
"step": 1460
},
{
"epoch": 1.38,
"grad_norm": 0.1907588094472885,
"learning_rate": 8.03888772736776e-06,
"loss": 0.0308,
"step": 1470
},
{
"epoch": 1.39,
"grad_norm": 0.18960894644260406,
"learning_rate": 8.017980347062515e-06,
"loss": 0.0157,
"step": 1480
},
{
"epoch": 1.4,
"grad_norm": 1.294573187828064,
"learning_rate": 7.997072966757265e-06,
"loss": 0.0162,
"step": 1490
},
{
"epoch": 1.41,
"grad_norm": 0.4640454947948456,
"learning_rate": 7.976165586452019e-06,
"loss": 0.0186,
"step": 1500
},
{
"epoch": 1.42,
"grad_norm": 0.6731734871864319,
"learning_rate": 7.95525820614677e-06,
"loss": 0.0144,
"step": 1510
},
{
"epoch": 1.43,
"grad_norm": 0.13755524158477783,
"learning_rate": 7.934350825841523e-06,
"loss": 0.0231,
"step": 1520
},
{
"epoch": 1.44,
"grad_norm": 0.16188235580921173,
"learning_rate": 7.913443445536275e-06,
"loss": 0.0276,
"step": 1530
},
{
"epoch": 1.45,
"grad_norm": 0.10896871984004974,
"learning_rate": 7.892536065231027e-06,
"loss": 0.0202,
"step": 1540
},
{
"epoch": 1.46,
"grad_norm": 0.08035355061292648,
"learning_rate": 7.871628684925779e-06,
"loss": 0.0143,
"step": 1550
},
{
"epoch": 1.47,
"grad_norm": 0.3662511706352234,
"learning_rate": 7.850721304620532e-06,
"loss": 0.0277,
"step": 1560
},
{
"epoch": 1.48,
"grad_norm": 0.5264787077903748,
"learning_rate": 7.829813924315284e-06,
"loss": 0.02,
"step": 1570
},
{
"epoch": 1.49,
"grad_norm": 0.9667835831642151,
"learning_rate": 7.808906544010036e-06,
"loss": 0.0236,
"step": 1580
},
{
"epoch": 1.5,
"grad_norm": 0.024623023346066475,
"learning_rate": 7.787999163704788e-06,
"loss": 0.0152,
"step": 1590
},
{
"epoch": 1.51,
"grad_norm": 0.01869453489780426,
"learning_rate": 7.76709178339954e-06,
"loss": 0.0165,
"step": 1600
},
{
"epoch": 1.51,
"grad_norm": 0.2934032678604126,
"learning_rate": 7.746184403094294e-06,
"loss": 0.0178,
"step": 1610
},
{
"epoch": 1.52,
"grad_norm": 0.1588401347398758,
"learning_rate": 7.725277022789044e-06,
"loss": 0.0174,
"step": 1620
},
{
"epoch": 1.53,
"grad_norm": 0.08647409826517105,
"learning_rate": 7.704369642483798e-06,
"loss": 0.0243,
"step": 1630
},
{
"epoch": 1.54,
"grad_norm": 0.16604632139205933,
"learning_rate": 7.683462262178549e-06,
"loss": 0.0323,
"step": 1640
},
{
"epoch": 1.55,
"grad_norm": 0.2372957319021225,
"learning_rate": 7.662554881873302e-06,
"loss": 0.0148,
"step": 1650
},
{
"epoch": 1.56,
"grad_norm": 0.1503574103116989,
"learning_rate": 7.641647501568054e-06,
"loss": 0.03,
"step": 1660
},
{
"epoch": 1.57,
"grad_norm": 0.029307017102837563,
"learning_rate": 7.620740121262806e-06,
"loss": 0.0221,
"step": 1670
},
{
"epoch": 1.58,
"grad_norm": 0.18366031348705292,
"learning_rate": 7.599832740957559e-06,
"loss": 0.015,
"step": 1680
},
{
"epoch": 1.59,
"grad_norm": 0.049557920545339584,
"learning_rate": 7.57892536065231e-06,
"loss": 0.0203,
"step": 1690
},
{
"epoch": 1.6,
"grad_norm": 0.18749569356441498,
"learning_rate": 7.558017980347063e-06,
"loss": 0.0221,
"step": 1700
},
{
"epoch": 1.61,
"grad_norm": 0.06821219623088837,
"learning_rate": 7.537110600041815e-06,
"loss": 0.0174,
"step": 1710
},
{
"epoch": 1.62,
"grad_norm": 0.27043628692626953,
"learning_rate": 7.516203219736567e-06,
"loss": 0.0214,
"step": 1720
},
{
"epoch": 1.63,
"grad_norm": 0.25328314304351807,
"learning_rate": 7.49529583943132e-06,
"loss": 0.0245,
"step": 1730
},
{
"epoch": 1.64,
"grad_norm": 0.11644481867551804,
"learning_rate": 7.474388459126072e-06,
"loss": 0.0218,
"step": 1740
},
{
"epoch": 1.65,
"grad_norm": 0.4062769114971161,
"learning_rate": 7.453481078820824e-06,
"loss": 0.0299,
"step": 1750
},
{
"epoch": 1.66,
"grad_norm": 0.30739670991897583,
"learning_rate": 7.432573698515576e-06,
"loss": 0.0351,
"step": 1760
},
{
"epoch": 1.67,
"grad_norm": 0.7801105380058289,
"learning_rate": 7.411666318210329e-06,
"loss": 0.0305,
"step": 1770
},
{
"epoch": 1.67,
"grad_norm": 0.29210716485977173,
"learning_rate": 7.390758937905082e-06,
"loss": 0.0274,
"step": 1780
},
{
"epoch": 1.68,
"grad_norm": 0.09251756221055984,
"learning_rate": 7.369851557599833e-06,
"loss": 0.0189,
"step": 1790
},
{
"epoch": 1.69,
"grad_norm": 0.1489870399236679,
"learning_rate": 7.348944177294586e-06,
"loss": 0.0256,
"step": 1800
},
{
"epoch": 1.7,
"grad_norm": 0.39114630222320557,
"learning_rate": 7.328036796989337e-06,
"loss": 0.0226,
"step": 1810
},
{
"epoch": 1.71,
"grad_norm": 0.19388756155967712,
"learning_rate": 7.30712941668409e-06,
"loss": 0.0287,
"step": 1820
},
{
"epoch": 1.72,
"grad_norm": 0.40138277411460876,
"learning_rate": 7.286222036378843e-06,
"loss": 0.0359,
"step": 1830
},
{
"epoch": 1.73,
"grad_norm": 0.34222084283828735,
"learning_rate": 7.265314656073594e-06,
"loss": 0.0298,
"step": 1840
},
{
"epoch": 1.74,
"grad_norm": 0.42924413084983826,
"learning_rate": 7.244407275768347e-06,
"loss": 0.0234,
"step": 1850
},
{
"epoch": 1.75,
"grad_norm": 0.26645031571388245,
"learning_rate": 7.223499895463099e-06,
"loss": 0.0202,
"step": 1860
},
{
"epoch": 1.76,
"grad_norm": 0.18649956583976746,
"learning_rate": 7.202592515157852e-06,
"loss": 0.0283,
"step": 1870
},
{
"epoch": 1.77,
"grad_norm": 0.23801521956920624,
"learning_rate": 7.181685134852604e-06,
"loss": 0.0187,
"step": 1880
},
{
"epoch": 1.78,
"grad_norm": 0.4391060769557953,
"learning_rate": 7.160777754547356e-06,
"loss": 0.0321,
"step": 1890
},
{
"epoch": 1.79,
"grad_norm": 0.8792116641998291,
"learning_rate": 7.139870374242109e-06,
"loss": 0.0272,
"step": 1900
},
{
"epoch": 1.8,
"grad_norm": 0.269652396440506,
"learning_rate": 7.11896299393686e-06,
"loss": 0.0304,
"step": 1910
},
{
"epoch": 1.81,
"grad_norm": 0.33860161900520325,
"learning_rate": 7.098055613631613e-06,
"loss": 0.0305,
"step": 1920
},
{
"epoch": 1.82,
"grad_norm": 0.4650591015815735,
"learning_rate": 7.0771482333263655e-06,
"loss": 0.0316,
"step": 1930
},
{
"epoch": 1.83,
"grad_norm": 0.1440097540616989,
"learning_rate": 7.056240853021117e-06,
"loss": 0.0223,
"step": 1940
},
{
"epoch": 1.83,
"grad_norm": 0.196376234292984,
"learning_rate": 7.0353334727158696e-06,
"loss": 0.0298,
"step": 1950
},
{
"epoch": 1.84,
"grad_norm": 0.617382824420929,
"learning_rate": 7.014426092410621e-06,
"loss": 0.0203,
"step": 1960
},
{
"epoch": 1.85,
"grad_norm": 0.14364871382713318,
"learning_rate": 6.993518712105374e-06,
"loss": 0.0217,
"step": 1970
},
{
"epoch": 1.86,
"grad_norm": 0.2357790470123291,
"learning_rate": 6.9726113318001265e-06,
"loss": 0.019,
"step": 1980
},
{
"epoch": 1.87,
"grad_norm": 0.13550205528736115,
"learning_rate": 6.9517039514948785e-06,
"loss": 0.0276,
"step": 1990
},
{
"epoch": 1.88,
"grad_norm": 0.20195119082927704,
"learning_rate": 6.9307965711896305e-06,
"loss": 0.0268,
"step": 2000
},
{
"epoch": 1.89,
"grad_norm": 0.19456946849822998,
"learning_rate": 6.9098891908843825e-06,
"loss": 0.0243,
"step": 2010
},
{
"epoch": 1.9,
"grad_norm": 0.5265508890151978,
"learning_rate": 6.888981810579135e-06,
"loss": 0.0342,
"step": 2020
},
{
"epoch": 1.91,
"grad_norm": 0.38090288639068604,
"learning_rate": 6.868074430273887e-06,
"loss": 0.0219,
"step": 2030
},
{
"epoch": 1.92,
"grad_norm": 0.30138471722602844,
"learning_rate": 6.847167049968639e-06,
"loss": 0.0193,
"step": 2040
},
{
"epoch": 1.93,
"grad_norm": 0.36553192138671875,
"learning_rate": 6.826259669663392e-06,
"loss": 0.0379,
"step": 2050
},
{
"epoch": 1.94,
"grad_norm": 0.27630284428596497,
"learning_rate": 6.8053522893581435e-06,
"loss": 0.016,
"step": 2060
},
{
"epoch": 1.95,
"grad_norm": 0.06610503047704697,
"learning_rate": 6.784444909052896e-06,
"loss": 0.0397,
"step": 2070
},
{
"epoch": 1.96,
"grad_norm": 0.257328599691391,
"learning_rate": 6.763537528747649e-06,
"loss": 0.0238,
"step": 2080
},
{
"epoch": 1.97,
"grad_norm": 0.1945256143808365,
"learning_rate": 6.7426301484424e-06,
"loss": 0.0212,
"step": 2090
},
{
"epoch": 1.98,
"grad_norm": 0.09793351590633392,
"learning_rate": 6.721722768137153e-06,
"loss": 0.0264,
"step": 2100
},
{
"epoch": 1.98,
"grad_norm": 0.3976469933986664,
"learning_rate": 6.700815387831905e-06,
"loss": 0.0294,
"step": 2110
},
{
"epoch": 1.99,
"grad_norm": 0.43862876296043396,
"learning_rate": 6.679908007526657e-06,
"loss": 0.0299,
"step": 2120
},
{
"epoch": 2.0,
"grad_norm": 0.3431900143623352,
"learning_rate": 6.659000627221409e-06,
"loss": 0.0196,
"step": 2130
},
{
"epoch": 2.01,
"grad_norm": 0.26596391201019287,
"learning_rate": 6.638093246916162e-06,
"loss": 0.0221,
"step": 2140
},
{
"epoch": 2.02,
"grad_norm": 0.5497652888298035,
"learning_rate": 6.617185866610914e-06,
"loss": 0.0144,
"step": 2150
},
{
"epoch": 2.03,
"grad_norm": 0.12056022882461548,
"learning_rate": 6.596278486305666e-06,
"loss": 0.0219,
"step": 2160
},
{
"epoch": 2.04,
"grad_norm": 0.2761884927749634,
"learning_rate": 6.575371106000419e-06,
"loss": 0.0152,
"step": 2170
},
{
"epoch": 2.05,
"grad_norm": 0.4963738024234772,
"learning_rate": 6.55446372569517e-06,
"loss": 0.012,
"step": 2180
},
{
"epoch": 2.06,
"grad_norm": 0.40073689818382263,
"learning_rate": 6.533556345389923e-06,
"loss": 0.027,
"step": 2190
},
{
"epoch": 2.07,
"grad_norm": 0.09300912916660309,
"learning_rate": 6.512648965084676e-06,
"loss": 0.0124,
"step": 2200
},
{
"epoch": 2.08,
"grad_norm": 0.49626946449279785,
"learning_rate": 6.491741584779427e-06,
"loss": 0.0122,
"step": 2210
},
{
"epoch": 2.09,
"grad_norm": 0.023487213999032974,
"learning_rate": 6.47083420447418e-06,
"loss": 0.0161,
"step": 2220
},
{
"epoch": 2.1,
"grad_norm": 0.0902651771903038,
"learning_rate": 6.449926824168932e-06,
"loss": 0.0269,
"step": 2230
},
{
"epoch": 2.11,
"grad_norm": 0.17983660101890564,
"learning_rate": 6.429019443863684e-06,
"loss": 0.0119,
"step": 2240
},
{
"epoch": 2.12,
"grad_norm": 0.1324489861726761,
"learning_rate": 6.408112063558437e-06,
"loss": 0.0278,
"step": 2250
},
{
"epoch": 2.13,
"grad_norm": 0.17838072776794434,
"learning_rate": 6.387204683253189e-06,
"loss": 0.0198,
"step": 2260
},
{
"epoch": 2.14,
"grad_norm": 0.2527213394641876,
"learning_rate": 6.366297302947941e-06,
"loss": 0.0154,
"step": 2270
},
{
"epoch": 2.14,
"grad_norm": 0.06320203095674515,
"learning_rate": 6.345389922642693e-06,
"loss": 0.0105,
"step": 2280
},
{
"epoch": 2.15,
"grad_norm": 0.17222613096237183,
"learning_rate": 6.324482542337446e-06,
"loss": 0.0131,
"step": 2290
},
{
"epoch": 2.16,
"grad_norm": 0.019979368895292282,
"learning_rate": 6.303575162032199e-06,
"loss": 0.0177,
"step": 2300
},
{
"epoch": 2.17,
"grad_norm": 0.6333135366439819,
"learning_rate": 6.28266778172695e-06,
"loss": 0.0205,
"step": 2310
},
{
"epoch": 2.18,
"grad_norm": 0.3780456483364105,
"learning_rate": 6.261760401421703e-06,
"loss": 0.0156,
"step": 2320
},
{
"epoch": 2.19,
"grad_norm": 0.3019196093082428,
"learning_rate": 6.240853021116454e-06,
"loss": 0.0139,
"step": 2330
},
{
"epoch": 2.2,
"grad_norm": 0.3984769582748413,
"learning_rate": 6.219945640811207e-06,
"loss": 0.0276,
"step": 2340
},
{
"epoch": 2.21,
"grad_norm": 0.22097636759281158,
"learning_rate": 6.19903826050596e-06,
"loss": 0.0198,
"step": 2350
},
{
"epoch": 2.22,
"grad_norm": 0.24413403868675232,
"learning_rate": 6.178130880200711e-06,
"loss": 0.018,
"step": 2360
},
{
"epoch": 2.23,
"grad_norm": 0.13032527267932892,
"learning_rate": 6.157223499895464e-06,
"loss": 0.0116,
"step": 2370
},
{
"epoch": 2.24,
"grad_norm": 0.2249244898557663,
"learning_rate": 6.136316119590216e-06,
"loss": 0.0118,
"step": 2380
},
{
"epoch": 2.25,
"grad_norm": 0.20351147651672363,
"learning_rate": 6.1154087392849685e-06,
"loss": 0.0257,
"step": 2390
},
{
"epoch": 2.26,
"grad_norm": 0.49516594409942627,
"learning_rate": 6.0945013589797206e-06,
"loss": 0.0154,
"step": 2400
},
{
"epoch": 2.27,
"grad_norm": 0.21651627123355865,
"learning_rate": 6.0735939786744726e-06,
"loss": 0.0135,
"step": 2410
},
{
"epoch": 2.28,
"grad_norm": 0.2528943121433258,
"learning_rate": 6.0526865983692254e-06,
"loss": 0.0298,
"step": 2420
},
{
"epoch": 2.29,
"grad_norm": 0.2160516232252121,
"learning_rate": 6.031779218063977e-06,
"loss": 0.0184,
"step": 2430
},
{
"epoch": 2.3,
"grad_norm": 0.14053639769554138,
"learning_rate": 6.0108718377587295e-06,
"loss": 0.0224,
"step": 2440
},
{
"epoch": 2.3,
"grad_norm": 0.23231235146522522,
"learning_rate": 5.989964457453482e-06,
"loss": 0.0152,
"step": 2450
},
{
"epoch": 2.31,
"grad_norm": 0.1623637080192566,
"learning_rate": 5.9690570771482335e-06,
"loss": 0.0089,
"step": 2460
},
{
"epoch": 2.32,
"grad_norm": 0.06655038148164749,
"learning_rate": 5.948149696842986e-06,
"loss": 0.0246,
"step": 2470
},
{
"epoch": 2.33,
"grad_norm": 0.508985698223114,
"learning_rate": 5.9272423165377375e-06,
"loss": 0.0144,
"step": 2480
},
{
"epoch": 2.34,
"grad_norm": 0.1423477828502655,
"learning_rate": 5.90633493623249e-06,
"loss": 0.0133,
"step": 2490
},
{
"epoch": 2.35,
"grad_norm": 0.29974058270454407,
"learning_rate": 5.885427555927243e-06,
"loss": 0.0133,
"step": 2500
},
{
"epoch": 2.36,
"grad_norm": 0.1467662900686264,
"learning_rate": 5.864520175621995e-06,
"loss": 0.0151,
"step": 2510
},
{
"epoch": 2.37,
"grad_norm": 0.09916812926530838,
"learning_rate": 5.843612795316747e-06,
"loss": 0.0173,
"step": 2520
},
{
"epoch": 2.38,
"grad_norm": 0.10352525860071182,
"learning_rate": 5.822705415011499e-06,
"loss": 0.0221,
"step": 2530
},
{
"epoch": 2.39,
"grad_norm": 0.5194036960601807,
"learning_rate": 5.801798034706252e-06,
"loss": 0.0258,
"step": 2540
},
{
"epoch": 2.4,
"grad_norm": 0.4094790816307068,
"learning_rate": 5.780890654401003e-06,
"loss": 0.018,
"step": 2550
},
{
"epoch": 2.41,
"grad_norm": 0.21202996373176575,
"learning_rate": 5.759983274095756e-06,
"loss": 0.0281,
"step": 2560
},
{
"epoch": 2.42,
"grad_norm": 0.11880209296941757,
"learning_rate": 5.739075893790509e-06,
"loss": 0.019,
"step": 2570
},
{
"epoch": 2.43,
"grad_norm": 0.15664508938789368,
"learning_rate": 5.71816851348526e-06,
"loss": 0.0162,
"step": 2580
},
{
"epoch": 2.44,
"grad_norm": 0.08462068438529968,
"learning_rate": 5.697261133180013e-06,
"loss": 0.0157,
"step": 2590
},
{
"epoch": 2.45,
"grad_norm": 0.5396077036857605,
"learning_rate": 5.676353752874765e-06,
"loss": 0.0184,
"step": 2600
},
{
"epoch": 2.46,
"grad_norm": 0.1727200448513031,
"learning_rate": 5.655446372569517e-06,
"loss": 0.0146,
"step": 2610
},
{
"epoch": 2.46,
"grad_norm": 0.27750787138938904,
"learning_rate": 5.63453899226427e-06,
"loss": 0.0137,
"step": 2620
},
{
"epoch": 2.47,
"grad_norm": 0.26835641264915466,
"learning_rate": 5.613631611959022e-06,
"loss": 0.0192,
"step": 2630
},
{
"epoch": 2.48,
"grad_norm": 0.8210089206695557,
"learning_rate": 5.592724231653774e-06,
"loss": 0.0195,
"step": 2640
},
{
"epoch": 2.49,
"grad_norm": 0.2197115570306778,
"learning_rate": 5.571816851348526e-06,
"loss": 0.021,
"step": 2650
},
{
"epoch": 2.5,
"grad_norm": 0.0288139246404171,
"learning_rate": 5.550909471043279e-06,
"loss": 0.0179,
"step": 2660
},
{
"epoch": 2.51,
"grad_norm": 0.4010894000530243,
"learning_rate": 5.530002090738031e-06,
"loss": 0.0163,
"step": 2670
},
{
"epoch": 2.52,
"grad_norm": 0.19797544181346893,
"learning_rate": 5.509094710432783e-06,
"loss": 0.0185,
"step": 2680
},
{
"epoch": 2.53,
"grad_norm": 0.06082729250192642,
"learning_rate": 5.488187330127536e-06,
"loss": 0.0245,
"step": 2690
},
{
"epoch": 2.54,
"grad_norm": 0.31951603293418884,
"learning_rate": 5.467279949822287e-06,
"loss": 0.0259,
"step": 2700
},
{
"epoch": 2.55,
"grad_norm": 0.11278670281171799,
"learning_rate": 5.44637256951704e-06,
"loss": 0.0131,
"step": 2710
},
{
"epoch": 2.56,
"grad_norm": 0.6295509934425354,
"learning_rate": 5.425465189211793e-06,
"loss": 0.0225,
"step": 2720
},
{
"epoch": 2.57,
"grad_norm": 0.12108682841062546,
"learning_rate": 5.404557808906544e-06,
"loss": 0.0139,
"step": 2730
},
{
"epoch": 2.58,
"grad_norm": 0.10168636590242386,
"learning_rate": 5.383650428601297e-06,
"loss": 0.0287,
"step": 2740
},
{
"epoch": 2.59,
"grad_norm": 0.06240009516477585,
"learning_rate": 5.362743048296049e-06,
"loss": 0.0155,
"step": 2750
},
{
"epoch": 2.6,
"grad_norm": 0.39674845337867737,
"learning_rate": 5.341835667990801e-06,
"loss": 0.019,
"step": 2760
},
{
"epoch": 2.61,
"grad_norm": 0.13627833127975464,
"learning_rate": 5.320928287685554e-06,
"loss": 0.0158,
"step": 2770
},
{
"epoch": 2.62,
"grad_norm": 0.15835390985012054,
"learning_rate": 5.300020907380306e-06,
"loss": 0.0145,
"step": 2780
},
{
"epoch": 2.62,
"grad_norm": 0.14348815381526947,
"learning_rate": 5.279113527075059e-06,
"loss": 0.0145,
"step": 2790
},
{
"epoch": 2.63,
"grad_norm": 0.08389858156442642,
"learning_rate": 5.25820614676981e-06,
"loss": 0.0085,
"step": 2800
},
{
"epoch": 2.64,
"grad_norm": 0.033944834023714066,
"learning_rate": 5.237298766464563e-06,
"loss": 0.0233,
"step": 2810
},
{
"epoch": 2.65,
"grad_norm": 0.031127430498600006,
"learning_rate": 5.2163913861593155e-06,
"loss": 0.0114,
"step": 2820
},
{
"epoch": 2.66,
"grad_norm": 0.1990656703710556,
"learning_rate": 5.195484005854067e-06,
"loss": 0.0162,
"step": 2830
},
{
"epoch": 2.67,
"grad_norm": 0.04372655227780342,
"learning_rate": 5.1745766255488195e-06,
"loss": 0.01,
"step": 2840
},
{
"epoch": 2.68,
"grad_norm": 0.34108391404151917,
"learning_rate": 5.153669245243571e-06,
"loss": 0.0166,
"step": 2850
},
{
"epoch": 2.69,
"grad_norm": 0.45837971568107605,
"learning_rate": 5.1327618649383236e-06,
"loss": 0.0192,
"step": 2860
},
{
"epoch": 2.7,
"grad_norm": 0.13491950929164886,
"learning_rate": 5.1118544846330764e-06,
"loss": 0.0162,
"step": 2870
},
{
"epoch": 2.71,
"grad_norm": 0.03369107097387314,
"learning_rate": 5.090947104327828e-06,
"loss": 0.0269,
"step": 2880
},
{
"epoch": 2.72,
"grad_norm": 0.28704699873924255,
"learning_rate": 5.0700397240225805e-06,
"loss": 0.0191,
"step": 2890
},
{
"epoch": 2.73,
"grad_norm": 0.6022350788116455,
"learning_rate": 5.0491323437173325e-06,
"loss": 0.0248,
"step": 2900
},
{
"epoch": 2.74,
"grad_norm": 0.3311866819858551,
"learning_rate": 5.028224963412085e-06,
"loss": 0.0337,
"step": 2910
},
{
"epoch": 2.75,
"grad_norm": 0.08945538103580475,
"learning_rate": 5.0073175831068365e-06,
"loss": 0.0166,
"step": 2920
},
{
"epoch": 2.76,
"grad_norm": 0.47838959097862244,
"learning_rate": 4.986410202801589e-06,
"loss": 0.0146,
"step": 2930
},
{
"epoch": 2.77,
"grad_norm": 0.0572611466050148,
"learning_rate": 4.965502822496341e-06,
"loss": 0.0086,
"step": 2940
},
{
"epoch": 2.78,
"grad_norm": 0.1355314403772354,
"learning_rate": 4.944595442191094e-06,
"loss": 0.0141,
"step": 2950
},
{
"epoch": 2.78,
"grad_norm": 0.10962551087141037,
"learning_rate": 4.923688061885846e-06,
"loss": 0.025,
"step": 2960
},
{
"epoch": 2.79,
"grad_norm": 0.3396119177341461,
"learning_rate": 4.902780681580598e-06,
"loss": 0.0171,
"step": 2970
},
{
"epoch": 2.8,
"grad_norm": 0.13699981570243835,
"learning_rate": 4.88187330127535e-06,
"loss": 0.012,
"step": 2980
},
{
"epoch": 2.81,
"grad_norm": 0.298997700214386,
"learning_rate": 4.860965920970102e-06,
"loss": 0.0218,
"step": 2990
},
{
"epoch": 2.82,
"grad_norm": 0.4527345895767212,
"learning_rate": 4.840058540664855e-06,
"loss": 0.0212,
"step": 3000
},
{
"epoch": 2.83,
"grad_norm": 0.15285594761371613,
"learning_rate": 4.819151160359607e-06,
"loss": 0.0162,
"step": 3010
},
{
"epoch": 2.84,
"grad_norm": 0.048979099839925766,
"learning_rate": 4.798243780054359e-06,
"loss": 0.0174,
"step": 3020
},
{
"epoch": 2.85,
"grad_norm": 0.05153890699148178,
"learning_rate": 4.777336399749112e-06,
"loss": 0.0149,
"step": 3030
},
{
"epoch": 2.86,
"grad_norm": 0.21725508570671082,
"learning_rate": 4.756429019443864e-06,
"loss": 0.0145,
"step": 3040
},
{
"epoch": 2.87,
"grad_norm": 0.16912169754505157,
"learning_rate": 4.735521639138616e-06,
"loss": 0.0179,
"step": 3050
},
{
"epoch": 2.88,
"grad_norm": 0.03088819980621338,
"learning_rate": 4.714614258833369e-06,
"loss": 0.0203,
"step": 3060
},
{
"epoch": 2.89,
"grad_norm": 0.17241007089614868,
"learning_rate": 4.693706878528121e-06,
"loss": 0.0151,
"step": 3070
},
{
"epoch": 2.9,
"grad_norm": 0.1420195996761322,
"learning_rate": 4.672799498222873e-06,
"loss": 0.0215,
"step": 3080
},
{
"epoch": 2.91,
"grad_norm": 0.23884275555610657,
"learning_rate": 4.651892117917625e-06,
"loss": 0.0135,
"step": 3090
},
{
"epoch": 2.92,
"grad_norm": 0.15318578481674194,
"learning_rate": 4.630984737612377e-06,
"loss": 0.0147,
"step": 3100
},
{
"epoch": 2.93,
"grad_norm": 0.20665739476680756,
"learning_rate": 4.61007735730713e-06,
"loss": 0.0143,
"step": 3110
},
{
"epoch": 2.94,
"grad_norm": 0.3664402663707733,
"learning_rate": 4.589169977001882e-06,
"loss": 0.0182,
"step": 3120
},
{
"epoch": 2.94,
"grad_norm": 0.3214040994644165,
"learning_rate": 4.568262596696634e-06,
"loss": 0.0199,
"step": 3130
},
{
"epoch": 2.95,
"grad_norm": 0.07582589983940125,
"learning_rate": 4.547355216391387e-06,
"loss": 0.012,
"step": 3140
},
{
"epoch": 2.96,
"grad_norm": 0.18084678053855896,
"learning_rate": 4.526447836086139e-06,
"loss": 0.0275,
"step": 3150
},
{
"epoch": 2.97,
"grad_norm": 0.9020859003067017,
"learning_rate": 4.505540455780891e-06,
"loss": 0.0208,
"step": 3160
},
{
"epoch": 2.98,
"grad_norm": 0.25799816846847534,
"learning_rate": 4.484633075475644e-06,
"loss": 0.0135,
"step": 3170
},
{
"epoch": 2.99,
"grad_norm": 0.10012619942426682,
"learning_rate": 4.463725695170396e-06,
"loss": 0.0202,
"step": 3180
}
],
"logging_steps": 10,
"max_steps": 5315,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 1.380935358480384e+18,
"train_batch_size": 224,
"trial_name": null,
"trial_params": null
}