{
"best_global_step": 5000,
"best_metric": 1.1470744609832764,
"best_model_checkpoint": "/workspace/actual_run/checkpoint-5000",
"epoch": 0.6012506012506013,
"eval_steps": 1000,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0012025012025012026,
"grad_norm": 1.0899138450622559,
"learning_rate": 3.6e-06,
"loss": 2.3927,
"step": 10
},
{
"epoch": 0.002405002405002405,
"grad_norm": 0.9620404839515686,
"learning_rate": 7.6e-06,
"loss": 2.2959,
"step": 20
},
{
"epoch": 0.0036075036075036075,
"grad_norm": 0.43678194284439087,
"learning_rate": 1.16e-05,
"loss": 2.036,
"step": 30
},
{
"epoch": 0.00481000481000481,
"grad_norm": 0.35978421568870544,
"learning_rate": 1.56e-05,
"loss": 1.898,
"step": 40
},
{
"epoch": 0.006012506012506013,
"grad_norm": 0.2896435856819153,
"learning_rate": 1.9600000000000002e-05,
"loss": 1.7747,
"step": 50
},
{
"epoch": 0.007215007215007215,
"grad_norm": 0.2754799425601959,
"learning_rate": 2.36e-05,
"loss": 1.6628,
"step": 60
},
{
"epoch": 0.008417508417508417,
"grad_norm": 0.2863922119140625,
"learning_rate": 2.7600000000000003e-05,
"loss": 1.6091,
"step": 70
},
{
"epoch": 0.00962000962000962,
"grad_norm": 0.29406264424324036,
"learning_rate": 3.16e-05,
"loss": 1.5302,
"step": 80
},
{
"epoch": 0.010822510822510822,
"grad_norm": 0.32185423374176025,
"learning_rate": 3.56e-05,
"loss": 1.498,
"step": 90
},
{
"epoch": 0.012025012025012025,
"grad_norm": 0.34314361214637756,
"learning_rate": 3.960000000000001e-05,
"loss": 1.4561,
"step": 100
},
{
"epoch": 0.013227513227513227,
"grad_norm": 0.3503110706806183,
"learning_rate": 4.36e-05,
"loss": 1.4569,
"step": 110
},
{
"epoch": 0.01443001443001443,
"grad_norm": 0.3761644661426544,
"learning_rate": 4.76e-05,
"loss": 1.4043,
"step": 120
},
{
"epoch": 0.015632515632515633,
"grad_norm": 0.3741130530834198,
"learning_rate": 5.16e-05,
"loss": 1.4147,
"step": 130
},
{
"epoch": 0.016835016835016835,
"grad_norm": 0.381028950214386,
"learning_rate": 5.560000000000001e-05,
"loss": 1.4226,
"step": 140
},
{
"epoch": 0.018037518037518036,
"grad_norm": 0.3512011468410492,
"learning_rate": 5.96e-05,
"loss": 1.4034,
"step": 150
},
{
"epoch": 0.01924001924001924,
"grad_norm": 0.33474254608154297,
"learning_rate": 6.36e-05,
"loss": 1.3853,
"step": 160
},
{
"epoch": 0.020442520442520443,
"grad_norm": 0.335902601480484,
"learning_rate": 6.76e-05,
"loss": 1.3865,
"step": 170
},
{
"epoch": 0.021645021645021644,
"grad_norm": 0.31389617919921875,
"learning_rate": 7.16e-05,
"loss": 1.3739,
"step": 180
},
{
"epoch": 0.02284752284752285,
"grad_norm": 0.3155975043773651,
"learning_rate": 7.560000000000001e-05,
"loss": 1.3713,
"step": 190
},
{
"epoch": 0.02405002405002405,
"grad_norm": 0.31867676973342896,
"learning_rate": 7.960000000000001e-05,
"loss": 1.3689,
"step": 200
},
{
"epoch": 0.025252525252525252,
"grad_norm": 0.2963453531265259,
"learning_rate": 8.36e-05,
"loss": 1.3701,
"step": 210
},
{
"epoch": 0.026455026455026454,
"grad_norm": 0.3254355788230896,
"learning_rate": 8.76e-05,
"loss": 1.3618,
"step": 220
},
{
"epoch": 0.02765752765752766,
"grad_norm": 0.31009721755981445,
"learning_rate": 9.16e-05,
"loss": 1.3562,
"step": 230
},
{
"epoch": 0.02886002886002886,
"grad_norm": 0.2769719958305359,
"learning_rate": 9.56e-05,
"loss": 1.341,
"step": 240
},
{
"epoch": 0.03006253006253006,
"grad_norm": 0.29058948159217834,
"learning_rate": 9.960000000000001e-05,
"loss": 1.3509,
"step": 250
},
{
"epoch": 0.031265031265031266,
"grad_norm": 0.2969858646392822,
"learning_rate": 9.999969280942074e-05,
"loss": 1.3533,
"step": 260
},
{
"epoch": 0.032467532467532464,
"grad_norm": 0.27514412999153137,
"learning_rate": 9.999863092090635e-05,
"loss": 1.3382,
"step": 270
},
{
"epoch": 0.03367003367003367,
"grad_norm": 0.2757578492164612,
"learning_rate": 9.99968105580855e-05,
"loss": 1.3302,
"step": 280
},
{
"epoch": 0.034872534872534874,
"grad_norm": 0.2729010283946991,
"learning_rate": 9.999423174857289e-05,
"loss": 1.3325,
"step": 290
},
{
"epoch": 0.03607503607503607,
"grad_norm": 0.26693403720855713,
"learning_rate": 9.999089453148882e-05,
"loss": 1.3318,
"step": 300
},
{
"epoch": 0.03727753727753728,
"grad_norm": 0.26203256845474243,
"learning_rate": 9.998679895745853e-05,
"loss": 1.3308,
"step": 310
},
{
"epoch": 0.03848003848003848,
"grad_norm": 0.24240988492965698,
"learning_rate": 9.998194508861147e-05,
"loss": 1.3211,
"step": 320
},
{
"epoch": 0.03968253968253968,
"grad_norm": 0.23748330771923065,
"learning_rate": 9.997633299858036e-05,
"loss": 1.317,
"step": 330
},
{
"epoch": 0.040885040885040885,
"grad_norm": 0.27641433477401733,
"learning_rate": 9.996996277250007e-05,
"loss": 1.3049,
"step": 340
},
{
"epoch": 0.04208754208754209,
"grad_norm": 0.261136531829834,
"learning_rate": 9.996283450700628e-05,
"loss": 1.3099,
"step": 350
},
{
"epoch": 0.04329004329004329,
"grad_norm": 0.26694369316101074,
"learning_rate": 9.995494831023409e-05,
"loss": 1.3064,
"step": 360
},
{
"epoch": 0.04449254449254449,
"grad_norm": 0.2695152759552002,
"learning_rate": 9.994630430181631e-05,
"loss": 1.3085,
"step": 370
},
{
"epoch": 0.0456950456950457,
"grad_norm": 0.26653772592544556,
"learning_rate": 9.99369026128817e-05,
"loss": 1.3184,
"step": 380
},
{
"epoch": 0.046897546897546896,
"grad_norm": 0.25345709919929504,
"learning_rate": 9.992674338605297e-05,
"loss": 1.2965,
"step": 390
},
{
"epoch": 0.0481000481000481,
"grad_norm": 0.2470959722995758,
"learning_rate": 9.991582677544458e-05,
"loss": 1.29,
"step": 400
},
{
"epoch": 0.0493025493025493,
"grad_norm": 0.2398555725812912,
"learning_rate": 9.990415294666045e-05,
"loss": 1.2991,
"step": 410
},
{
"epoch": 0.050505050505050504,
"grad_norm": 0.24114194512367249,
"learning_rate": 9.98917220767914e-05,
"loss": 1.294,
"step": 420
},
{
"epoch": 0.05170755170755171,
"grad_norm": 0.24480991065502167,
"learning_rate": 9.98785343544125e-05,
"loss": 1.2827,
"step": 430
},
{
"epoch": 0.05291005291005291,
"grad_norm": 0.2351623922586441,
"learning_rate": 9.986458997958021e-05,
"loss": 1.2868,
"step": 440
},
{
"epoch": 0.05411255411255411,
"grad_norm": 0.23396646976470947,
"learning_rate": 9.984988916382932e-05,
"loss": 1.2894,
"step": 450
},
{
"epoch": 0.05531505531505532,
"grad_norm": 0.24305066466331482,
"learning_rate": 9.983443213016975e-05,
"loss": 1.2929,
"step": 460
},
{
"epoch": 0.056517556517556515,
"grad_norm": 0.24993565678596497,
"learning_rate": 9.981821911308316e-05,
"loss": 1.2809,
"step": 470
},
{
"epoch": 0.05772005772005772,
"grad_norm": 0.24196045100688934,
"learning_rate": 9.980125035851945e-05,
"loss": 1.2771,
"step": 480
},
{
"epoch": 0.058922558922558925,
"grad_norm": 0.2293768674135208,
"learning_rate": 9.978352612389298e-05,
"loss": 1.2899,
"step": 490
},
{
"epoch": 0.06012506012506012,
"grad_norm": 0.22991864383220673,
"learning_rate": 9.97650466780786e-05,
"loss": 1.2813,
"step": 500
},
{
"epoch": 0.06132756132756133,
"grad_norm": 0.24029232561588287,
"learning_rate": 9.97458123014077e-05,
"loss": 1.2816,
"step": 510
},
{
"epoch": 0.06253006253006253,
"grad_norm": 0.2242475003004074,
"learning_rate": 9.972582328566387e-05,
"loss": 1.2859,
"step": 520
},
{
"epoch": 0.06373256373256374,
"grad_norm": 0.2273736447095871,
"learning_rate": 9.97050799340785e-05,
"loss": 1.2743,
"step": 530
},
{
"epoch": 0.06493506493506493,
"grad_norm": 0.23436114192008972,
"learning_rate": 9.968358256132623e-05,
"loss": 1.2841,
"step": 540
},
{
"epoch": 0.06613756613756613,
"grad_norm": 0.22182011604309082,
"learning_rate": 9.966133149352006e-05,
"loss": 1.2933,
"step": 550
},
{
"epoch": 0.06734006734006734,
"grad_norm": 0.23829922080039978,
"learning_rate": 9.963832706820649e-05,
"loss": 1.2865,
"step": 560
},
{
"epoch": 0.06854256854256854,
"grad_norm": 0.24821604788303375,
"learning_rate": 9.96145696343604e-05,
"loss": 1.2835,
"step": 570
},
{
"epoch": 0.06974506974506975,
"grad_norm": 0.22477108240127563,
"learning_rate": 9.959005955237972e-05,
"loss": 1.2845,
"step": 580
},
{
"epoch": 0.07094757094757095,
"grad_norm": 0.23542803525924683,
"learning_rate": 9.956479719407998e-05,
"loss": 1.287,
"step": 590
},
{
"epoch": 0.07215007215007214,
"grad_norm": 0.22595302760601044,
"learning_rate": 9.953878294268866e-05,
"loss": 1.2762,
"step": 600
},
{
"epoch": 0.07335257335257335,
"grad_norm": 0.21597643196582794,
"learning_rate": 9.95120171928394e-05,
"loss": 1.283,
"step": 610
},
{
"epoch": 0.07455507455507455,
"grad_norm": 0.23060384392738342,
"learning_rate": 9.948450035056599e-05,
"loss": 1.2847,
"step": 620
},
{
"epoch": 0.07575757575757576,
"grad_norm": 0.21899910271167755,
"learning_rate": 9.945623283329621e-05,
"loss": 1.2699,
"step": 630
},
{
"epoch": 0.07696007696007696,
"grad_norm": 0.21934176981449127,
"learning_rate": 9.942721506984551e-05,
"loss": 1.274,
"step": 640
},
{
"epoch": 0.07816257816257816,
"grad_norm": 0.22260645031929016,
"learning_rate": 9.939744750041052e-05,
"loss": 1.2653,
"step": 650
},
{
"epoch": 0.07936507936507936,
"grad_norm": 0.22758053243160248,
"learning_rate": 9.936693057656231e-05,
"loss": 1.2712,
"step": 660
},
{
"epoch": 0.08056758056758057,
"grad_norm": 0.21833918988704681,
"learning_rate": 9.933566476123964e-05,
"loss": 1.2631,
"step": 670
},
{
"epoch": 0.08177008177008177,
"grad_norm": 0.2229488492012024,
"learning_rate": 9.930365052874182e-05,
"loss": 1.2632,
"step": 680
},
{
"epoch": 0.08297258297258298,
"grad_norm": 0.2153736650943756,
"learning_rate": 9.927088836472163e-05,
"loss": 1.284,
"step": 690
},
{
"epoch": 0.08417508417508418,
"grad_norm": 0.23731300234794617,
"learning_rate": 9.923737876617784e-05,
"loss": 1.2629,
"step": 700
},
{
"epoch": 0.08537758537758537,
"grad_norm": 0.22687861323356628,
"learning_rate": 9.920312224144779e-05,
"loss": 1.2757,
"step": 710
},
{
"epoch": 0.08658008658008658,
"grad_norm": 0.22836917638778687,
"learning_rate": 9.916811931019956e-05,
"loss": 1.2589,
"step": 720
},
{
"epoch": 0.08778258778258778,
"grad_norm": 0.2113468050956726,
"learning_rate": 9.91323705034242e-05,
"loss": 1.2515,
"step": 730
},
{
"epoch": 0.08898508898508899,
"grad_norm": 0.2195415198802948,
"learning_rate": 9.909587636342756e-05,
"loss": 1.2544,
"step": 740
},
{
"epoch": 0.09018759018759019,
"grad_norm": 0.2246798574924469,
"learning_rate": 9.905863744382217e-05,
"loss": 1.2503,
"step": 750
},
{
"epoch": 0.0913900913900914,
"grad_norm": 0.22869926691055298,
"learning_rate": 9.902065430951874e-05,
"loss": 1.2626,
"step": 760
},
{
"epoch": 0.09259259259259259,
"grad_norm": 0.2191305309534073,
"learning_rate": 9.898192753671774e-05,
"loss": 1.2612,
"step": 770
},
{
"epoch": 0.09379509379509379,
"grad_norm": 0.22004824876785278,
"learning_rate": 9.894245771290042e-05,
"loss": 1.2734,
"step": 780
},
{
"epoch": 0.094997594997595,
"grad_norm": 0.22499217092990875,
"learning_rate": 9.890224543682017e-05,
"loss": 1.244,
"step": 790
},
{
"epoch": 0.0962000962000962,
"grad_norm": 0.22316116094589233,
"learning_rate": 9.886129131849324e-05,
"loss": 1.2567,
"step": 800
},
{
"epoch": 0.09740259740259741,
"grad_norm": 0.23548166453838348,
"learning_rate": 9.88195959791896e-05,
"loss": 1.2649,
"step": 810
},
{
"epoch": 0.0986050986050986,
"grad_norm": 0.22244389355182648,
"learning_rate": 9.877716005142339e-05,
"loss": 1.2516,
"step": 820
},
{
"epoch": 0.0998075998075998,
"grad_norm": 0.22191107273101807,
"learning_rate": 9.873398417894348e-05,
"loss": 1.2503,
"step": 830
},
{
"epoch": 0.10101010101010101,
"grad_norm": 0.22165456414222717,
"learning_rate": 9.869006901672362e-05,
"loss": 1.2466,
"step": 840
},
{
"epoch": 0.10221260221260221,
"grad_norm": 0.2198014259338379,
"learning_rate": 9.864541523095247e-05,
"loss": 1.246,
"step": 850
},
{
"epoch": 0.10341510341510342,
"grad_norm": 0.22250819206237793,
"learning_rate": 9.860002349902358e-05,
"loss": 1.2362,
"step": 860
},
{
"epoch": 0.10461760461760462,
"grad_norm": 0.2301010638475418,
"learning_rate": 9.855389450952503e-05,
"loss": 1.255,
"step": 870
},
{
"epoch": 0.10582010582010581,
"grad_norm": 0.22151581943035126,
"learning_rate": 9.850702896222907e-05,
"loss": 1.2555,
"step": 880
},
{
"epoch": 0.10702260702260702,
"grad_norm": 0.2172437161207199,
"learning_rate": 9.845942756808145e-05,
"loss": 1.2519,
"step": 890
},
{
"epoch": 0.10822510822510822,
"grad_norm": 0.2380341738462448,
"learning_rate": 9.841109104919057e-05,
"loss": 1.2565,
"step": 900
},
{
"epoch": 0.10942760942760943,
"grad_norm": 0.2219957560300827,
"learning_rate": 9.836202013881673e-05,
"loss": 1.2472,
"step": 910
},
{
"epoch": 0.11063011063011063,
"grad_norm": 0.23652082681655884,
"learning_rate": 9.831221558136075e-05,
"loss": 1.249,
"step": 920
},
{
"epoch": 0.11183261183261184,
"grad_norm": 0.21968142688274384,
"learning_rate": 9.826167813235291e-05,
"loss": 1.2451,
"step": 930
},
{
"epoch": 0.11303511303511303,
"grad_norm": 0.22762618958950043,
"learning_rate": 9.82104085584413e-05,
"loss": 1.2433,
"step": 940
},
{
"epoch": 0.11423761423761424,
"grad_norm": 0.20969653129577637,
"learning_rate": 9.815840763738032e-05,
"loss": 1.2452,
"step": 950
},
{
"epoch": 0.11544011544011544,
"grad_norm": 0.22823800146579742,
"learning_rate": 9.810567615801883e-05,
"loss": 1.2482,
"step": 960
},
{
"epoch": 0.11664261664261664,
"grad_norm": 0.22033190727233887,
"learning_rate": 9.805221492028815e-05,
"loss": 1.2466,
"step": 970
},
{
"epoch": 0.11784511784511785,
"grad_norm": 0.22417309880256653,
"learning_rate": 9.799802473519e-05,
"loss": 1.2531,
"step": 980
},
{
"epoch": 0.11904761904761904,
"grad_norm": 0.21632517874240875,
"learning_rate": 9.794310642478418e-05,
"loss": 1.2479,
"step": 990
},
{
"epoch": 0.12025012025012025,
"grad_norm": 0.21755601465702057,
"learning_rate": 9.788746082217601e-05,
"loss": 1.2575,
"step": 1000
},
{
"epoch": 0.12025012025012025,
"eval_loss": 1.2297765016555786,
"eval_runtime": 17.3877,
"eval_samples_per_second": 14.723,
"eval_steps_per_second": 0.46,
"step": 1000
},
{
"epoch": 0.12145262145262145,
"grad_norm": 0.23050642013549805,
"learning_rate": 9.783108877150384e-05,
"loss": 1.2438,
"step": 1010
},
{
"epoch": 0.12265512265512266,
"grad_norm": 0.19902385771274567,
"learning_rate": 9.77739911279261e-05,
"loss": 1.2523,
"step": 1020
},
{
"epoch": 0.12385762385762386,
"grad_norm": 0.27783408761024475,
"learning_rate": 9.77161687576084e-05,
"loss": 1.2591,
"step": 1030
},
{
"epoch": 0.12506012506012507,
"grad_norm": 0.21347227692604065,
"learning_rate": 9.765762253771045e-05,
"loss": 1.2519,
"step": 1040
},
{
"epoch": 0.12626262626262627,
"grad_norm": 0.209860697388649,
"learning_rate": 9.75983533563726e-05,
"loss": 1.229,
"step": 1050
},
{
"epoch": 0.12746512746512748,
"grad_norm": 0.20719142258167267,
"learning_rate": 9.753836211270254e-05,
"loss": 1.2559,
"step": 1060
},
{
"epoch": 0.12866762866762868,
"grad_norm": 0.22753511369228363,
"learning_rate": 9.747764971676146e-05,
"loss": 1.2511,
"step": 1070
},
{
"epoch": 0.12987012987012986,
"grad_norm": 0.22132696211338043,
"learning_rate": 9.741621708955048e-05,
"loss": 1.2591,
"step": 1080
},
{
"epoch": 0.13107263107263106,
"grad_norm": 0.22907616198062897,
"learning_rate": 9.735406516299649e-05,
"loss": 1.2483,
"step": 1090
},
{
"epoch": 0.13227513227513227,
"grad_norm": 0.20939530432224274,
"learning_rate": 9.729119487993809e-05,
"loss": 1.2426,
"step": 1100
},
{
"epoch": 0.13347763347763347,
"grad_norm": 0.22089892625808716,
"learning_rate": 9.722760719411128e-05,
"loss": 1.2527,
"step": 1110
},
{
"epoch": 0.13468013468013468,
"grad_norm": 0.2165553718805313,
"learning_rate": 9.716330307013499e-05,
"loss": 1.2486,
"step": 1120
},
{
"epoch": 0.13588263588263588,
"grad_norm": 0.22434702515602112,
"learning_rate": 9.709828348349645e-05,
"loss": 1.244,
"step": 1130
},
{
"epoch": 0.1370851370851371,
"grad_norm": 0.22943449020385742,
"learning_rate": 9.703254942053641e-05,
"loss": 1.244,
"step": 1140
},
{
"epoch": 0.1382876382876383,
"grad_norm": 0.22439205646514893,
"learning_rate": 9.69661018784341e-05,
"loss": 1.2324,
"step": 1150
},
{
"epoch": 0.1394901394901395,
"grad_norm": 0.22962580621242523,
"learning_rate": 9.689894186519217e-05,
"loss": 1.2437,
"step": 1160
},
{
"epoch": 0.1406926406926407,
"grad_norm": 0.22367720305919647,
"learning_rate": 9.683107039962144e-05,
"loss": 1.2353,
"step": 1170
},
{
"epoch": 0.1418951418951419,
"grad_norm": 0.23102396726608276,
"learning_rate": 9.676248851132536e-05,
"loss": 1.224,
"step": 1180
},
{
"epoch": 0.14309764309764308,
"grad_norm": 0.20691992342472076,
"learning_rate": 9.669319724068441e-05,
"loss": 1.2448,
"step": 1190
},
{
"epoch": 0.1443001443001443,
"grad_norm": 0.2133265882730484,
"learning_rate": 9.662319763884035e-05,
"loss": 1.2562,
"step": 1200
},
{
"epoch": 0.1455026455026455,
"grad_norm": 0.21801921725273132,
"learning_rate": 9.655249076768023e-05,
"loss": 1.2366,
"step": 1210
},
{
"epoch": 0.1467051467051467,
"grad_norm": 0.22324581444263458,
"learning_rate": 9.648107769982033e-05,
"loss": 1.2341,
"step": 1220
},
{
"epoch": 0.1479076479076479,
"grad_norm": 0.20417769253253937,
"learning_rate": 9.640895951858984e-05,
"loss": 1.2339,
"step": 1230
},
{
"epoch": 0.1491101491101491,
"grad_norm": 0.21648766100406647,
"learning_rate": 9.63361373180145e-05,
"loss": 1.23,
"step": 1240
},
{
"epoch": 0.15031265031265031,
"grad_norm": 0.21471014618873596,
"learning_rate": 9.626261220279988e-05,
"loss": 1.2445,
"step": 1250
},
{
"epoch": 0.15151515151515152,
"grad_norm": 0.21205437183380127,
"learning_rate": 9.618838528831477e-05,
"loss": 1.2434,
"step": 1260
},
{
"epoch": 0.15271765271765272,
"grad_norm": 0.20593945682048798,
"learning_rate": 9.611345770057414e-05,
"loss": 1.2367,
"step": 1270
},
{
"epoch": 0.15392015392015393,
"grad_norm": 0.2065548151731491,
"learning_rate": 9.60378305762221e-05,
"loss": 1.2386,
"step": 1280
},
{
"epoch": 0.15512265512265513,
"grad_norm": 0.21276754140853882,
"learning_rate": 9.596150506251469e-05,
"loss": 1.2358,
"step": 1290
},
{
"epoch": 0.1563251563251563,
"grad_norm": 0.22176425158977509,
"learning_rate": 9.588448231730245e-05,
"loss": 1.235,
"step": 1300
},
{
"epoch": 0.15752765752765752,
"grad_norm": 0.21053512394428253,
"learning_rate": 9.580676350901282e-05,
"loss": 1.2195,
"step": 1310
},
{
"epoch": 0.15873015873015872,
"grad_norm": 0.22540676593780518,
"learning_rate": 9.572834981663246e-05,
"loss": 1.2313,
"step": 1320
},
{
"epoch": 0.15993265993265993,
"grad_norm": 0.20599214732646942,
"learning_rate": 9.56492424296894e-05,
"loss": 1.2177,
"step": 1330
},
{
"epoch": 0.16113516113516113,
"grad_norm": 0.2064780443906784,
"learning_rate": 9.556944254823485e-05,
"loss": 1.2278,
"step": 1340
},
{
"epoch": 0.16233766233766234,
"grad_norm": 0.2161046266555786,
"learning_rate": 9.548895138282521e-05,
"loss": 1.2369,
"step": 1350
},
{
"epoch": 0.16354016354016354,
"grad_norm": 0.2143697589635849,
"learning_rate": 9.540777015450349e-05,
"loss": 1.2163,
"step": 1360
},
{
"epoch": 0.16474266474266475,
"grad_norm": 0.20479600131511688,
"learning_rate": 9.532590009478094e-05,
"loss": 1.2185,
"step": 1370
},
{
"epoch": 0.16594516594516595,
"grad_norm": 0.2071349024772644,
"learning_rate": 9.52433424456183e-05,
"loss": 1.2449,
"step": 1380
},
{
"epoch": 0.16714766714766716,
"grad_norm": 0.22588619589805603,
"learning_rate": 9.516009845940697e-05,
"loss": 1.2395,
"step": 1390
},
{
"epoch": 0.16835016835016836,
"grad_norm": 0.21358510851860046,
"learning_rate": 9.507616939895005e-05,
"loss": 1.2259,
"step": 1400
},
{
"epoch": 0.16955266955266957,
"grad_norm": 0.21996615827083588,
"learning_rate": 9.49915565374431e-05,
"loss": 1.2333,
"step": 1410
},
{
"epoch": 0.17075517075517074,
"grad_norm": 0.22476601600646973,
"learning_rate": 9.490626115845489e-05,
"loss": 1.2375,
"step": 1420
},
{
"epoch": 0.17195767195767195,
"grad_norm": 0.21539901196956635,
"learning_rate": 9.482028455590793e-05,
"loss": 1.2351,
"step": 1430
},
{
"epoch": 0.17316017316017315,
"grad_norm": 0.21999700367450714,
"learning_rate": 9.47336280340588e-05,
"loss": 1.2217,
"step": 1440
},
{
"epoch": 0.17436267436267436,
"grad_norm": 0.20949865877628326,
"learning_rate": 9.464629290747842e-05,
"loss": 1.2099,
"step": 1450
},
{
"epoch": 0.17556517556517556,
"grad_norm": 0.21222126483917236,
"learning_rate": 9.455828050103205e-05,
"loss": 1.226,
"step": 1460
},
{
"epoch": 0.17676767676767677,
"grad_norm": 0.22125640511512756,
"learning_rate": 9.446959214985922e-05,
"loss": 1.2223,
"step": 1470
},
{
"epoch": 0.17797017797017797,
"grad_norm": 0.21773898601531982,
"learning_rate": 9.438022919935348e-05,
"loss": 1.2289,
"step": 1480
},
{
"epoch": 0.17917267917267918,
"grad_norm": 0.22156096994876862,
"learning_rate": 9.4290193005142e-05,
"loss": 1.2191,
"step": 1490
},
{
"epoch": 0.18037518037518038,
"grad_norm": 0.2058996856212616,
"learning_rate": 9.419948493306497e-05,
"loss": 1.2111,
"step": 1500
},
{
"epoch": 0.1815776815776816,
"grad_norm": 0.22363074123859406,
"learning_rate": 9.410810635915491e-05,
"loss": 1.2171,
"step": 1510
},
{
"epoch": 0.1827801827801828,
"grad_norm": 0.20594915747642517,
"learning_rate": 9.401605866961579e-05,
"loss": 1.2205,
"step": 1520
},
{
"epoch": 0.18398268398268397,
"grad_norm": 0.22017163038253784,
"learning_rate": 9.392334326080199e-05,
"loss": 1.2306,
"step": 1530
},
{
"epoch": 0.18518518518518517,
"grad_norm": 0.20129500329494476,
"learning_rate": 9.382996153919713e-05,
"loss": 1.2044,
"step": 1540
},
{
"epoch": 0.18638768638768638,
"grad_norm": 0.21580781042575836,
"learning_rate": 9.373591492139273e-05,
"loss": 1.2207,
"step": 1550
},
{
"epoch": 0.18759018759018758,
"grad_norm": 0.20488041639328003,
"learning_rate": 9.364120483406672e-05,
"loss": 1.2311,
"step": 1560
},
{
"epoch": 0.1887926887926888,
"grad_norm": 0.22156283259391785,
"learning_rate": 9.35458327139618e-05,
"loss": 1.2158,
"step": 1570
},
{
"epoch": 0.18999518999519,
"grad_norm": 0.22037768363952637,
"learning_rate": 9.344980000786363e-05,
"loss": 1.2246,
"step": 1580
},
{
"epoch": 0.1911976911976912,
"grad_norm": 0.19976097345352173,
"learning_rate": 9.33531081725789e-05,
"loss": 1.2157,
"step": 1590
},
{
"epoch": 0.1924001924001924,
"grad_norm": 0.2248261421918869,
"learning_rate": 9.325575867491323e-05,
"loss": 1.2235,
"step": 1600
},
{
"epoch": 0.1936026936026936,
"grad_norm": 0.22592051327228546,
"learning_rate": 9.315775299164892e-05,
"loss": 1.2271,
"step": 1610
},
{
"epoch": 0.19480519480519481,
"grad_norm": 0.22421322762966156,
"learning_rate": 9.305909260952254e-05,
"loss": 1.2378,
"step": 1620
},
{
"epoch": 0.19600769600769602,
"grad_norm": 0.2143516093492508,
"learning_rate": 9.295977902520235e-05,
"loss": 1.2326,
"step": 1630
},
{
"epoch": 0.1972101972101972,
"grad_norm": 0.2073206901550293,
"learning_rate": 9.285981374526564e-05,
"loss": 1.2241,
"step": 1640
},
{
"epoch": 0.1984126984126984,
"grad_norm": 0.2234421968460083,
"learning_rate": 9.275919828617589e-05,
"loss": 1.2277,
"step": 1650
},
{
"epoch": 0.1996151996151996,
"grad_norm": 0.218864306807518,
"learning_rate": 9.265793417425967e-05,
"loss": 1.2256,
"step": 1660
},
{
"epoch": 0.2008177008177008,
"grad_norm": 0.2139173001050949,
"learning_rate": 9.25560229456836e-05,
"loss": 1.2247,
"step": 1670
},
{
"epoch": 0.20202020202020202,
"grad_norm": 0.2014576643705368,
"learning_rate": 9.245346614643096e-05,
"loss": 1.2215,
"step": 1680
},
{
"epoch": 0.20322270322270322,
"grad_norm": 0.22390897572040558,
"learning_rate": 9.235026533227833e-05,
"loss": 1.2273,
"step": 1690
},
{
"epoch": 0.20442520442520443,
"grad_norm": 0.2203395515680313,
"learning_rate": 9.224642206877187e-05,
"loss": 1.2067,
"step": 1700
},
{
"epoch": 0.20562770562770563,
"grad_norm": 0.22504153847694397,
"learning_rate": 9.214193793120368e-05,
"loss": 1.2285,
"step": 1710
},
{
"epoch": 0.20683020683020684,
"grad_norm": 0.2131216675043106,
"learning_rate": 9.203681450458781e-05,
"loss": 1.2239,
"step": 1720
},
{
"epoch": 0.20803270803270804,
"grad_norm": 0.2105296105146408,
"learning_rate": 9.19310533836363e-05,
"loss": 1.2145,
"step": 1730
},
{
"epoch": 0.20923520923520925,
"grad_norm": 0.2101157158613205,
"learning_rate": 9.182465617273494e-05,
"loss": 1.2134,
"step": 1740
},
{
"epoch": 0.21043771043771045,
"grad_norm": 0.20046111941337585,
"learning_rate": 9.171762448591894e-05,
"loss": 1.2207,
"step": 1750
},
{
"epoch": 0.21164021164021163,
"grad_norm": 0.232273668050766,
"learning_rate": 9.160995994684845e-05,
"loss": 1.2315,
"step": 1760
},
{
"epoch": 0.21284271284271283,
"grad_norm": 0.21907658874988556,
"learning_rate": 9.15016641887839e-05,
"loss": 1.2183,
"step": 1770
},
{
"epoch": 0.21404521404521404,
"grad_norm": 0.20053844153881073,
"learning_rate": 9.139273885456132e-05,
"loss": 1.2187,
"step": 1780
},
{
"epoch": 0.21524771524771524,
"grad_norm": 0.2047508955001831,
"learning_rate": 9.128318559656725e-05,
"loss": 1.2022,
"step": 1790
},
{
"epoch": 0.21645021645021645,
"grad_norm": 0.21334955096244812,
"learning_rate": 9.117300607671386e-05,
"loss": 1.1995,
"step": 1800
},
{
"epoch": 0.21765271765271765,
"grad_norm": 0.20535485446453094,
"learning_rate": 9.106220196641357e-05,
"loss": 1.2255,
"step": 1810
},
{
"epoch": 0.21885521885521886,
"grad_norm": 0.20729170739650726,
"learning_rate": 9.095077494655388e-05,
"loss": 1.2186,
"step": 1820
},
{
"epoch": 0.22005772005772006,
"grad_norm": 0.20496122539043427,
"learning_rate": 9.083872670747166e-05,
"loss": 1.2024,
"step": 1830
},
{
"epoch": 0.22126022126022127,
"grad_norm": 0.19766771793365479,
"learning_rate": 9.07260589489277e-05,
"loss": 1.2097,
"step": 1840
},
{
"epoch": 0.22246272246272247,
"grad_norm": 0.21484710276126862,
"learning_rate": 9.061277338008077e-05,
"loss": 1.2311,
"step": 1850
},
{
"epoch": 0.22366522366522368,
"grad_norm": 0.206639364361763,
"learning_rate": 9.049887171946179e-05,
"loss": 1.2167,
"step": 1860
},
{
"epoch": 0.22486772486772486,
"grad_norm": 0.2012685239315033,
"learning_rate": 9.038435569494773e-05,
"loss": 1.2172,
"step": 1870
},
{
"epoch": 0.22607022607022606,
"grad_norm": 0.2186802476644516,
"learning_rate": 9.026922704373543e-05,
"loss": 1.2147,
"step": 1880
},
{
"epoch": 0.22727272727272727,
"grad_norm": 0.19656942784786224,
"learning_rate": 9.015348751231517e-05,
"loss": 1.2212,
"step": 1890
},
{
"epoch": 0.22847522847522847,
"grad_norm": 0.20295904576778412,
"learning_rate": 9.003713885644425e-05,
"loss": 1.2021,
"step": 1900
},
{
"epoch": 0.22967772967772968,
"grad_norm": 0.2020638883113861,
"learning_rate": 8.99201828411203e-05,
"loss": 1.2098,
"step": 1910
},
{
"epoch": 0.23088023088023088,
"grad_norm": 0.2027895748615265,
"learning_rate": 8.980262124055458e-05,
"loss": 1.2116,
"step": 1920
},
{
"epoch": 0.23208273208273208,
"grad_norm": 0.1999751627445221,
"learning_rate": 8.968445583814497e-05,
"loss": 1.2056,
"step": 1930
},
{
"epoch": 0.2332852332852333,
"grad_norm": 0.20799754559993744,
"learning_rate": 8.956568842644901e-05,
"loss": 1.2088,
"step": 1940
},
{
"epoch": 0.2344877344877345,
"grad_norm": 0.21467134356498718,
"learning_rate": 8.944632080715662e-05,
"loss": 1.2239,
"step": 1950
},
{
"epoch": 0.2356902356902357,
"grad_norm": 0.2006864845752716,
"learning_rate": 8.932635479106283e-05,
"loss": 1.203,
"step": 1960
},
{
"epoch": 0.2368927368927369,
"grad_norm": 0.20447371900081635,
"learning_rate": 8.920579219804028e-05,
"loss": 1.222,
"step": 1970
},
{
"epoch": 0.23809523809523808,
"grad_norm": 0.21055077016353607,
"learning_rate": 8.908463485701164e-05,
"loss": 1.2173,
"step": 1980
},
{
"epoch": 0.2392977392977393,
"grad_norm": 0.2071334421634674,
"learning_rate": 8.896288460592186e-05,
"loss": 1.2014,
"step": 1990
},
{
"epoch": 0.2405002405002405,
"grad_norm": 0.21648217737674713,
"learning_rate": 8.884054329171021e-05,
"loss": 1.2085,
"step": 2000
},
{
"epoch": 0.2405002405002405,
"eval_loss": 1.1967148780822754,
"eval_runtime": 14.3237,
"eval_samples_per_second": 17.872,
"eval_steps_per_second": 0.559,
"step": 2000
},
{
"epoch": 0.2417027417027417,
"grad_norm": 0.20826809108257294,
"learning_rate": 8.871761277028243e-05,
"loss": 1.2277,
"step": 2010
},
{
"epoch": 0.2429052429052429,
"grad_norm": 0.2014724761247635,
"learning_rate": 8.859409490648238e-05,
"loss": 1.2081,
"step": 2020
},
{
"epoch": 0.2441077441077441,
"grad_norm": 0.21313191950321198,
"learning_rate": 8.84699915740639e-05,
"loss": 1.2047,
"step": 2030
},
{
"epoch": 0.2453102453102453,
"grad_norm": 0.21019341051578522,
"learning_rate": 8.834530465566233e-05,
"loss": 1.2178,
"step": 2040
},
{
"epoch": 0.24651274651274652,
"grad_norm": 0.2147923707962036,
"learning_rate": 8.822003604276595e-05,
"loss": 1.2189,
"step": 2050
},
{
"epoch": 0.24771524771524772,
"grad_norm": 0.214359313249588,
"learning_rate": 8.809418763568725e-05,
"loss": 1.2094,
"step": 2060
},
{
"epoch": 0.24891774891774893,
"grad_norm": 0.21459384262561798,
"learning_rate": 8.796776134353418e-05,
"loss": 1.216,
"step": 2070
},
{
"epoch": 0.25012025012025013,
"grad_norm": 0.2167990505695343,
"learning_rate": 8.784075908418115e-05,
"loss": 1.2014,
"step": 2080
},
{
"epoch": 0.25132275132275134,
"grad_norm": 0.2073349505662918,
"learning_rate": 8.771318278423992e-05,
"loss": 1.2033,
"step": 2090
},
{
"epoch": 0.25252525252525254,
"grad_norm": 0.20970456302165985,
"learning_rate": 8.758503437903038e-05,
"loss": 1.1972,
"step": 2100
},
{
"epoch": 0.25372775372775375,
"grad_norm": 0.19900967180728912,
"learning_rate": 8.745631581255125e-05,
"loss": 1.1896,
"step": 2110
},
{
"epoch": 0.25493025493025495,
"grad_norm": 0.21769461035728455,
"learning_rate": 8.732702903745048e-05,
"loss": 1.213,
"step": 2120
},
{
"epoch": 0.25613275613275616,
"grad_norm": 0.21593014895915985,
"learning_rate": 8.719717601499571e-05,
"loss": 1.2076,
"step": 2130
},
{
"epoch": 0.25733525733525736,
"grad_norm": 0.20195241272449493,
"learning_rate": 8.706675871504451e-05,
"loss": 1.2142,
"step": 2140
},
{
"epoch": 0.2585377585377585,
"grad_norm": 0.2088335007429123,
"learning_rate": 8.693577911601447e-05,
"loss": 1.2198,
"step": 2150
},
{
"epoch": 0.2597402597402597,
"grad_norm": 0.21312519907951355,
"learning_rate": 8.680423920485321e-05,
"loss": 1.2187,
"step": 2160
},
{
"epoch": 0.2609427609427609,
"grad_norm": 0.20363889634609222,
"learning_rate": 8.667214097700824e-05,
"loss": 1.1993,
"step": 2170
},
{
"epoch": 0.2621452621452621,
"grad_norm": 0.21147218346595764,
"learning_rate": 8.653948643639665e-05,
"loss": 1.2278,
"step": 2180
},
{
"epoch": 0.26334776334776333,
"grad_norm": 0.1997493952512741,
"learning_rate": 8.640627759537475e-05,
"loss": 1.2036,
"step": 2190
},
{
"epoch": 0.26455026455026454,
"grad_norm": 0.21293872594833374,
"learning_rate": 8.627251647470756e-05,
"loss": 1.2096,
"step": 2200
},
{
"epoch": 0.26575276575276574,
"grad_norm": 0.19573794305324554,
"learning_rate": 8.613820510353807e-05,
"loss": 1.2034,
"step": 2210
},
{
"epoch": 0.26695526695526695,
"grad_norm": 0.21020345389842987,
"learning_rate": 8.600334551935658e-05,
"loss": 1.2207,
"step": 2220
},
{
"epoch": 0.26815776815776815,
"grad_norm": 0.20261149108409882,
"learning_rate": 8.58679397679697e-05,
"loss": 1.2128,
"step": 2230
},
{
"epoch": 0.26936026936026936,
"grad_norm": 0.20684467256069183,
"learning_rate": 8.573198990346933e-05,
"loss": 1.1972,
"step": 2240
},
{
"epoch": 0.27056277056277056,
"grad_norm": 0.19626250863075256,
"learning_rate": 8.559549798820153e-05,
"loss": 1.1957,
"step": 2250
},
{
"epoch": 0.27176527176527177,
"grad_norm": 0.20869341492652893,
"learning_rate": 8.545846609273522e-05,
"loss": 1.2164,
"step": 2260
},
{
"epoch": 0.27296777296777297,
"grad_norm": 0.21097266674041748,
"learning_rate": 8.532089629583073e-05,
"loss": 1.2131,
"step": 2270
},
{
"epoch": 0.2741702741702742,
"grad_norm": 0.2031770646572113,
"learning_rate": 8.518279068440835e-05,
"loss": 1.206,
"step": 2280
},
{
"epoch": 0.2753727753727754,
"grad_norm": 0.21206524968147278,
"learning_rate": 8.50441513535166e-05,
"loss": 1.2118,
"step": 2290
},
{
"epoch": 0.2765752765752766,
"grad_norm": 0.19096438586711884,
"learning_rate": 8.490498040630048e-05,
"loss": 1.2009,
"step": 2300
},
{
"epoch": 0.2777777777777778,
"grad_norm": 0.20884640514850616,
"learning_rate": 8.476527995396953e-05,
"loss": 1.1908,
"step": 2310
},
{
"epoch": 0.278980278980279,
"grad_norm": 0.20409995317459106,
"learning_rate": 8.462505211576588e-05,
"loss": 1.1933,
"step": 2320
},
{
"epoch": 0.2801827801827802,
"grad_norm": 0.2128736823797226,
"learning_rate": 8.4484299018932e-05,
"loss": 1.207,
"step": 2330
},
{
"epoch": 0.2813852813852814,
"grad_norm": 0.2101830691099167,
"learning_rate": 8.434302279867852e-05,
"loss": 1.1973,
"step": 2340
},
{
"epoch": 0.2825877825877826,
"grad_norm": 0.19666483998298645,
"learning_rate": 8.420122559815177e-05,
"loss": 1.1878,
"step": 2350
},
{
"epoch": 0.2837902837902838,
"grad_norm": 0.1945558786392212,
"learning_rate": 8.405890956840135e-05,
"loss": 1.2162,
"step": 2360
},
{
"epoch": 0.284992784992785,
"grad_norm": 0.2024092972278595,
"learning_rate": 8.39160768683474e-05,
"loss": 1.2096,
"step": 2370
},
{
"epoch": 0.28619528619528617,
"grad_norm": 0.2207275927066803,
"learning_rate": 8.377272966474788e-05,
"loss": 1.2031,
"step": 2380
},
{
"epoch": 0.2873977873977874,
"grad_norm": 0.1977287232875824,
"learning_rate": 8.362887013216578e-05,
"loss": 1.1946,
"step": 2390
},
{
"epoch": 0.2886002886002886,
"grad_norm": 0.19938814640045166,
"learning_rate": 8.348450045293604e-05,
"loss": 1.2117,
"step": 2400
},
{
"epoch": 0.2898027898027898,
"grad_norm": 0.21072271466255188,
"learning_rate": 8.333962281713246e-05,
"loss": 1.2087,
"step": 2410
},
{
"epoch": 0.291005291005291,
"grad_norm": 0.2154613584280014,
"learning_rate": 8.319423942253451e-05,
"loss": 1.1915,
"step": 2420
},
{
"epoch": 0.2922077922077922,
"grad_norm": 0.20417426526546478,
"learning_rate": 8.304835247459397e-05,
"loss": 1.1966,
"step": 2430
},
{
"epoch": 0.2934102934102934,
"grad_norm": 0.21270157396793365,
"learning_rate": 8.290196418640148e-05,
"loss": 1.1878,
"step": 2440
},
{
"epoch": 0.2946127946127946,
"grad_norm": 0.19687621295452118,
"learning_rate": 8.275507677865295e-05,
"loss": 1.2036,
"step": 2450
},
{
"epoch": 0.2958152958152958,
"grad_norm": 0.19195468723773956,
"learning_rate": 8.260769247961586e-05,
"loss": 1.1901,
"step": 2460
},
{
"epoch": 0.297017797017797,
"grad_norm": 0.2017616629600525,
"learning_rate": 8.245981352509555e-05,
"loss": 1.2007,
"step": 2470
},
{
"epoch": 0.2982202982202982,
"grad_norm": 0.21277114748954773,
"learning_rate": 8.231144215840117e-05,
"loss": 1.1899,
"step": 2480
},
{
"epoch": 0.2994227994227994,
"grad_norm": 0.21405024826526642,
"learning_rate": 8.216258063031175e-05,
"loss": 1.2029,
"step": 2490
},
{
"epoch": 0.30062530062530063,
"grad_norm": 0.20833241939544678,
"learning_rate": 8.201323119904201e-05,
"loss": 1.2087,
"step": 2500
},
{
"epoch": 0.30182780182780183,
"grad_norm": 0.20577466487884521,
"learning_rate": 8.186339613020809e-05,
"loss": 1.1951,
"step": 2510
},
{
"epoch": 0.30303030303030304,
"grad_norm": 0.20994292199611664,
"learning_rate": 8.171307769679326e-05,
"loss": 1.2139,
"step": 2520
},
{
"epoch": 0.30423280423280424,
"grad_norm": 0.2054295688867569,
"learning_rate": 8.156227817911333e-05,
"loss": 1.2085,
"step": 2530
},
{
"epoch": 0.30543530543530545,
"grad_norm": 0.1991153508424759,
"learning_rate": 8.141099986478212e-05,
"loss": 1.1936,
"step": 2540
},
{
"epoch": 0.30663780663780665,
"grad_norm": 0.20854881405830383,
"learning_rate": 8.125924504867679e-05,
"loss": 1.204,
"step": 2550
},
{
"epoch": 0.30784030784030786,
"grad_norm": 0.20640797913074493,
"learning_rate": 8.110701603290292e-05,
"loss": 1.1792,
"step": 2560
},
{
"epoch": 0.30904280904280906,
"grad_norm": 0.20775394141674042,
"learning_rate": 8.09543151267597e-05,
"loss": 1.1927,
"step": 2570
},
{
"epoch": 0.31024531024531027,
"grad_norm": 0.21143817901611328,
"learning_rate": 8.080114464670485e-05,
"loss": 1.1949,
"step": 2580
},
{
"epoch": 0.3114478114478115,
"grad_norm": 0.2006131410598755,
"learning_rate": 8.064750691631944e-05,
"loss": 1.192,
"step": 2590
},
{
"epoch": 0.3126503126503126,
"grad_norm": 0.2078796625137329,
"learning_rate": 8.049340426627274e-05,
"loss": 1.2008,
"step": 2600
},
{
"epoch": 0.31385281385281383,
"grad_norm": 0.20483560860157013,
"learning_rate": 8.033883903428674e-05,
"loss": 1.2157,
"step": 2610
},
{
"epoch": 0.31505531505531503,
"grad_norm": 0.2086118459701538,
"learning_rate": 8.018381356510079e-05,
"loss": 1.1891,
"step": 2620
},
{
"epoch": 0.31625781625781624,
"grad_norm": 0.20505578815937042,
"learning_rate": 8.0028330210436e-05,
"loss": 1.2005,
"step": 2630
},
{
"epoch": 0.31746031746031744,
"grad_norm": 0.2115529179573059,
"learning_rate": 7.987239132895954e-05,
"loss": 1.2056,
"step": 2640
},
{
"epoch": 0.31866281866281865,
"grad_norm": 0.19756314158439636,
"learning_rate": 7.97159992862489e-05,
"loss": 1.2031,
"step": 2650
},
{
"epoch": 0.31986531986531985,
"grad_norm": 0.20881183445453644,
"learning_rate": 7.955915645475593e-05,
"loss": 1.1852,
"step": 2660
},
{
"epoch": 0.32106782106782106,
"grad_norm": 0.21202848851680756,
"learning_rate": 7.940186521377097e-05,
"loss": 1.1975,
"step": 2670
},
{
"epoch": 0.32227032227032226,
"grad_norm": 0.20970021188259125,
"learning_rate": 7.924412794938664e-05,
"loss": 1.1986,
"step": 2680
},
{
"epoch": 0.32347282347282347,
"grad_norm": 0.2039441466331482,
"learning_rate": 7.90859470544617e-05,
"loss": 1.2239,
"step": 2690
},
{
"epoch": 0.3246753246753247,
"grad_norm": 0.2211122065782547,
"learning_rate": 7.892732492858474e-05,
"loss": 1.1958,
"step": 2700
},
{
"epoch": 0.3258778258778259,
"grad_norm": 0.20219026505947113,
"learning_rate": 7.876826397803781e-05,
"loss": 1.2029,
"step": 2710
},
{
"epoch": 0.3270803270803271,
"grad_norm": 0.197218120098114,
"learning_rate": 7.860876661575985e-05,
"loss": 1.1929,
"step": 2720
},
{
"epoch": 0.3282828282828283,
"grad_norm": 0.20465494692325592,
"learning_rate": 7.844883526131013e-05,
"loss": 1.198,
"step": 2730
},
{
"epoch": 0.3294853294853295,
"grad_norm": 0.22161713242530823,
"learning_rate": 7.828847234083153e-05,
"loss": 1.1981,
"step": 2740
},
{
"epoch": 0.3306878306878307,
"grad_norm": 0.20785829424858093,
"learning_rate": 7.81276802870138e-05,
"loss": 1.1915,
"step": 2750
},
{
"epoch": 0.3318903318903319,
"grad_norm": 0.20283977687358856,
"learning_rate": 7.796646153905653e-05,
"loss": 1.1856,
"step": 2760
},
{
"epoch": 0.3330928330928331,
"grad_norm": 0.20320013165473938,
"learning_rate": 7.780481854263225e-05,
"loss": 1.1918,
"step": 2770
},
{
"epoch": 0.3342953342953343,
"grad_norm": 0.20670950412750244,
"learning_rate": 7.76427537498493e-05,
"loss": 1.1965,
"step": 2780
},
{
"epoch": 0.3354978354978355,
"grad_norm": 0.199970081448555,
"learning_rate": 7.748026961921465e-05,
"loss": 1.2006,
"step": 2790
},
{
"epoch": 0.3367003367003367,
"grad_norm": 0.2103297859430313,
"learning_rate": 7.731736861559651e-05,
"loss": 1.1927,
"step": 2800
},
{
"epoch": 0.3379028379028379,
"grad_norm": 0.2028837651014328,
"learning_rate": 7.715405321018707e-05,
"loss": 1.2001,
"step": 2810
},
{
"epoch": 0.33910533910533913,
"grad_norm": 0.20478375256061554,
"learning_rate": 7.699032588046498e-05,
"loss": 1.1841,
"step": 2820
},
{
"epoch": 0.3403078403078403,
"grad_norm": 0.20195071399211884,
"learning_rate": 7.682618911015767e-05,
"loss": 1.1666,
"step": 2830
},
{
"epoch": 0.3415103415103415,
"grad_norm": 0.21081767976284027,
"learning_rate": 7.666164538920378e-05,
"loss": 1.1951,
"step": 2840
},
{
"epoch": 0.3427128427128427,
"grad_norm": 0.2054494321346283,
"learning_rate": 7.649669721371537e-05,
"loss": 1.2028,
"step": 2850
},
{
"epoch": 0.3439153439153439,
"grad_norm": 0.20955149829387665,
"learning_rate": 7.633134708594001e-05,
"loss": 1.2108,
"step": 2860
},
{
"epoch": 0.3451178451178451,
"grad_norm": 0.2094833254814148,
"learning_rate": 7.616559751422285e-05,
"loss": 1.2022,
"step": 2870
},
{
"epoch": 0.3463203463203463,
"grad_norm": 0.20287872850894928,
"learning_rate": 7.599945101296856e-05,
"loss": 1.202,
"step": 2880
},
{
"epoch": 0.3475228475228475,
"grad_norm": 0.2033422887325287,
"learning_rate": 7.583291010260321e-05,
"loss": 1.1895,
"step": 2890
},
{
"epoch": 0.3487253487253487,
"grad_norm": 0.20638012886047363,
"learning_rate": 7.566597730953602e-05,
"loss": 1.1981,
"step": 2900
},
{
"epoch": 0.3499278499278499,
"grad_norm": 0.19885151088237762,
"learning_rate": 7.549865516612098e-05,
"loss": 1.1895,
"step": 2910
},
{
"epoch": 0.3511303511303511,
"grad_norm": 0.20345020294189453,
"learning_rate": 7.533094621061854e-05,
"loss": 1.1855,
"step": 2920
},
{
"epoch": 0.35233285233285233,
"grad_norm": 0.19838982820510864,
"learning_rate": 7.516285298715706e-05,
"loss": 1.1892,
"step": 2930
},
{
"epoch": 0.35353535353535354,
"grad_norm": 0.1952482908964157,
"learning_rate": 7.499437804569416e-05,
"loss": 1.174,
"step": 2940
},
{
"epoch": 0.35473785473785474,
"grad_norm": 0.20667259395122528,
"learning_rate": 7.482552394197812e-05,
"loss": 1.1913,
"step": 2950
},
{
"epoch": 0.35594035594035595,
"grad_norm": 0.2147095650434494,
"learning_rate": 7.465629323750905e-05,
"loss": 1.1997,
"step": 2960
},
{
"epoch": 0.35714285714285715,
"grad_norm": 0.21083101630210876,
"learning_rate": 7.448668849950008e-05,
"loss": 1.2023,
"step": 2970
},
{
"epoch": 0.35834535834535836,
"grad_norm": 0.2125108540058136,
"learning_rate": 7.431671230083836e-05,
"loss": 1.1925,
"step": 2980
},
{
"epoch": 0.35954785954785956,
"grad_norm": 0.20733962953090668,
"learning_rate": 7.414636722004614e-05,
"loss": 1.1871,
"step": 2990
},
{
"epoch": 0.36075036075036077,
"grad_norm": 0.20381608605384827,
"learning_rate": 7.397565584124144e-05,
"loss": 1.1913,
"step": 3000
},
{
"epoch": 0.36075036075036077,
"eval_loss": 1.1786988973617554,
"eval_runtime": 14.3861,
"eval_samples_per_second": 17.795,
"eval_steps_per_second": 0.556,
"step": 3000
},
{
"epoch": 0.36195286195286197,
"grad_norm": 0.19937773048877716,
"learning_rate": 7.380458075409912e-05,
"loss": 1.1869,
"step": 3010
},
{
"epoch": 0.3631553631553632,
"grad_norm": 0.20516780018806458,
"learning_rate": 7.363314455381136e-05,
"loss": 1.175,
"step": 3020
},
{
"epoch": 0.3643578643578644,
"grad_norm": 0.20108605921268463,
"learning_rate": 7.346134984104846e-05,
"loss": 1.1788,
"step": 3030
},
{
"epoch": 0.3655603655603656,
"grad_norm": 0.20104803144931793,
"learning_rate": 7.328919922191928e-05,
"loss": 1.1832,
"step": 3040
},
{
"epoch": 0.3667628667628668,
"grad_norm": 0.21416722238063812,
"learning_rate": 7.311669530793176e-05,
"loss": 1.1949,
"step": 3050
},
{
"epoch": 0.36796536796536794,
"grad_norm": 0.19773589074611664,
"learning_rate": 7.29438407159533e-05,
"loss": 1.1887,
"step": 3060
},
{
"epoch": 0.36916786916786914,
"grad_norm": 0.2011365294456482,
"learning_rate": 7.277063806817102e-05,
"loss": 1.1924,
"step": 3070
},
{
"epoch": 0.37037037037037035,
"grad_norm": 0.19436123967170715,
"learning_rate": 7.259708999205203e-05,
"loss": 1.1853,
"step": 3080
},
{
"epoch": 0.37157287157287155,
"grad_norm": 0.20969006419181824,
"learning_rate": 7.242319912030356e-05,
"loss": 1.1869,
"step": 3090
},
{
"epoch": 0.37277537277537276,
"grad_norm": 0.19676007330417633,
"learning_rate": 7.224896809083297e-05,
"loss": 1.194,
"step": 3100
},
{
"epoch": 0.37397787397787396,
"grad_norm": 0.20664817094802856,
"learning_rate": 7.207439954670784e-05,
"loss": 1.1919,
"step": 3110
},
{
"epoch": 0.37518037518037517,
"grad_norm": 0.19718950986862183,
"learning_rate": 7.189949613611582e-05,
"loss": 1.1814,
"step": 3120
},
{
"epoch": 0.3763828763828764,
"grad_norm": 0.19568046927452087,
"learning_rate": 7.172426051232437e-05,
"loss": 1.1958,
"step": 3130
},
{
"epoch": 0.3775853775853776,
"grad_norm": 0.21798531711101532,
"learning_rate": 7.154869533364067e-05,
"loss": 1.1898,
"step": 3140
},
{
"epoch": 0.3787878787878788,
"grad_norm": 0.20783917605876923,
"learning_rate": 7.13728032633712e-05,
"loss": 1.1806,
"step": 3150
},
{
"epoch": 0.37999037999038,
"grad_norm": 0.20036672055721283,
"learning_rate": 7.119658696978132e-05,
"loss": 1.1902,
"step": 3160
},
{
"epoch": 0.3811928811928812,
"grad_norm": 0.20404507219791412,
"learning_rate": 7.102004912605485e-05,
"loss": 1.1825,
"step": 3170
},
{
"epoch": 0.3823953823953824,
"grad_norm": 0.21140117943286896,
"learning_rate": 7.084319241025351e-05,
"loss": 1.1956,
"step": 3180
},
{
"epoch": 0.3835978835978836,
"grad_norm": 0.1950325220823288,
"learning_rate": 7.066601950527625e-05,
"loss": 1.1872,
"step": 3190
},
{
"epoch": 0.3848003848003848,
"grad_norm": 0.20742125809192657,
"learning_rate": 7.048853309881858e-05,
"loss": 1.1929,
"step": 3200
},
{
"epoch": 0.386002886002886,
"grad_norm": 0.21259550750255585,
"learning_rate": 7.031073588333179e-05,
"loss": 1.1935,
"step": 3210
},
{
"epoch": 0.3872053872053872,
"grad_norm": 0.2075534164905548,
"learning_rate": 7.013263055598216e-05,
"loss": 1.1821,
"step": 3220
},
{
"epoch": 0.3884078884078884,
"grad_norm": 0.19778668880462646,
"learning_rate": 6.995421981860994e-05,
"loss": 1.178,
"step": 3230
},
{
"epoch": 0.38961038961038963,
"grad_norm": 0.19642433524131775,
"learning_rate": 6.977550637768845e-05,
"loss": 1.1993,
"step": 3240
},
{
"epoch": 0.39081289081289083,
"grad_norm": 0.2102692574262619,
"learning_rate": 6.959649294428301e-05,
"loss": 1.192,
"step": 3250
},
{
"epoch": 0.39201539201539204,
"grad_norm": 0.20500397682189941,
"learning_rate": 6.941718223400975e-05,
"loss": 1.1994,
"step": 3260
},
{
"epoch": 0.39321789321789324,
"grad_norm": 0.21970514953136444,
"learning_rate": 6.923757696699446e-05,
"loss": 1.1792,
"step": 3270
},
{
"epoch": 0.3944203944203944,
"grad_norm": 0.19493255019187927,
"learning_rate": 6.90576798678314e-05,
"loss": 1.1938,
"step": 3280
},
{
"epoch": 0.3956228956228956,
"grad_norm": 0.19722020626068115,
"learning_rate": 6.887749366554182e-05,
"loss": 1.1904,
"step": 3290
},
{
"epoch": 0.3968253968253968,
"grad_norm": 0.21302542090415955,
"learning_rate": 6.869702109353264e-05,
"loss": 1.1888,
"step": 3300
},
{
"epoch": 0.398027898027898,
"grad_norm": 0.20728740096092224,
"learning_rate": 6.851626488955505e-05,
"loss": 1.2069,
"step": 3310
},
{
"epoch": 0.3992303992303992,
"grad_norm": 0.2085309624671936,
"learning_rate": 6.833522779566281e-05,
"loss": 1.184,
"step": 3320
},
{
"epoch": 0.4004329004329004,
"grad_norm": 0.2124137580394745,
"learning_rate": 6.815391255817086e-05,
"loss": 1.1877,
"step": 3330
},
{
"epoch": 0.4016354016354016,
"grad_norm": 0.2014608383178711,
"learning_rate": 6.797232192761345e-05,
"loss": 1.1776,
"step": 3340
},
{
"epoch": 0.40283790283790283,
"grad_norm": 0.19843342900276184,
"learning_rate": 6.779045865870256e-05,
"loss": 1.1966,
"step": 3350
},
{
"epoch": 0.40404040404040403,
"grad_norm": 0.20755374431610107,
"learning_rate": 6.760832551028609e-05,
"loss": 1.1876,
"step": 3360
},
{
"epoch": 0.40524290524290524,
"grad_norm": 0.19694215059280396,
"learning_rate": 6.742592524530595e-05,
"loss": 1.1788,
"step": 3370
},
{
"epoch": 0.40644540644540644,
"grad_norm": 0.20904555916786194,
"learning_rate": 6.72432606307562e-05,
"loss": 1.1818,
"step": 3380
},
{
"epoch": 0.40764790764790765,
"grad_norm": 0.21066156029701233,
"learning_rate": 6.706033443764104e-05,
"loss": 1.1899,
"step": 3390
},
{
"epoch": 0.40885040885040885,
"grad_norm": 0.20113535225391388,
"learning_rate": 6.687714944093282e-05,
"loss": 1.1867,
"step": 3400
},
{
"epoch": 0.41005291005291006,
"grad_norm": 0.198592409491539,
"learning_rate": 6.669370841952987e-05,
"loss": 1.186,
"step": 3410
},
{
"epoch": 0.41125541125541126,
"grad_norm": 0.2045023888349533,
"learning_rate": 6.651001415621446e-05,
"loss": 1.1737,
"step": 3420
},
{
"epoch": 0.41245791245791247,
"grad_norm": 0.20362548530101776,
"learning_rate": 6.632606943761042e-05,
"loss": 1.1602,
"step": 3430
},
{
"epoch": 0.4136604136604137,
"grad_norm": 0.20577895641326904,
"learning_rate": 6.614187705414105e-05,
"loss": 1.1885,
"step": 3440
},
{
"epoch": 0.4148629148629149,
"grad_norm": 0.202180415391922,
"learning_rate": 6.595743979998668e-05,
"loss": 1.1863,
"step": 3450
},
{
"epoch": 0.4160654160654161,
"grad_norm": 0.19655773043632507,
"learning_rate": 6.577276047304222e-05,
"loss": 1.1767,
"step": 3460
},
{
"epoch": 0.4172679172679173,
"grad_norm": 0.1869806945323944,
"learning_rate": 6.558784187487494e-05,
"loss": 1.1911,
"step": 3470
},
{
"epoch": 0.4184704184704185,
"grad_norm": 0.19990621507167816,
"learning_rate": 6.540268681068174e-05,
"loss": 1.1714,
"step": 3480
},
{
"epoch": 0.4196729196729197,
"grad_norm": 0.2027176469564438,
"learning_rate": 6.521729808924668e-05,
"loss": 1.1753,
"step": 3490
},
{
"epoch": 0.4208754208754209,
"grad_norm": 0.19918115437030792,
"learning_rate": 6.503167852289844e-05,
"loss": 1.1862,
"step": 3500
},
{
"epoch": 0.42207792207792205,
"grad_norm": 0.20005355775356293,
"learning_rate": 6.484583092746753e-05,
"loss": 1.1848,
"step": 3510
},
{
"epoch": 0.42328042328042326,
"grad_norm": 0.19497598707675934,
"learning_rate": 6.46597581222437e-05,
"loss": 1.1677,
"step": 3520
},
{
"epoch": 0.42448292448292446,
"grad_norm": 0.21364623308181763,
"learning_rate": 6.447346292993307e-05,
"loss": 1.1889,
"step": 3530
},
{
"epoch": 0.42568542568542567,
"grad_norm": 0.1999025195837021,
"learning_rate": 6.428694817661531e-05,
"loss": 1.1833,
"step": 3540
},
{
"epoch": 0.42688792688792687,
"grad_norm": 0.20487244427204132,
"learning_rate": 6.410021669170093e-05,
"loss": 1.189,
"step": 3550
},
{
"epoch": 0.4280904280904281,
"grad_norm": 0.20337916910648346,
"learning_rate": 6.391327130788813e-05,
"loss": 1.1849,
"step": 3560
},
{
"epoch": 0.4292929292929293,
"grad_norm": 0.20541474223136902,
"learning_rate": 6.372611486111994e-05,
"loss": 1.1879,
"step": 3570
},
{
"epoch": 0.4304954304954305,
"grad_norm": 0.1998613476753235,
"learning_rate": 6.353875019054128e-05,
"loss": 1.1795,
"step": 3580
},
{
"epoch": 0.4316979316979317,
"grad_norm": 0.19440466165542603,
"learning_rate": 6.335118013845572e-05,
"loss": 1.1865,
"step": 3590
},
{
"epoch": 0.4329004329004329,
"grad_norm": 0.2010820358991623,
"learning_rate": 6.316340755028247e-05,
"loss": 1.184,
"step": 3600
},
{
"epoch": 0.4341029341029341,
"grad_norm": 0.20916162431240082,
"learning_rate": 6.297543527451322e-05,
"loss": 1.1758,
"step": 3610
},
{
"epoch": 0.4353054353054353,
"grad_norm": 0.20128333568572998,
"learning_rate": 6.278726616266884e-05,
"loss": 1.1753,
"step": 3620
},
{
"epoch": 0.4365079365079365,
"grad_norm": 0.20334778726100922,
"learning_rate": 6.259890306925627e-05,
"loss": 1.1811,
"step": 3630
},
{
"epoch": 0.4377104377104377,
"grad_norm": 0.21417754888534546,
"learning_rate": 6.241034885172505e-05,
"loss": 1.1873,
"step": 3640
},
{
"epoch": 0.4389129389129389,
"grad_norm": 0.2282254844903946,
"learning_rate": 6.22216063704241e-05,
"loss": 1.1857,
"step": 3650
},
{
"epoch": 0.4401154401154401,
"grad_norm": 0.1929093897342682,
"learning_rate": 6.203267848855828e-05,
"loss": 1.1864,
"step": 3660
},
{
"epoch": 0.44131794131794133,
"grad_norm": 0.19937632977962494,
"learning_rate": 6.184356807214493e-05,
"loss": 1.1752,
"step": 3670
},
{
"epoch": 0.44252044252044254,
"grad_norm": 0.20052993297576904,
"learning_rate": 6.165427798997046e-05,
"loss": 1.1702,
"step": 3680
},
{
"epoch": 0.44372294372294374,
"grad_norm": 0.20837515592575073,
"learning_rate": 6.146481111354676e-05,
"loss": 1.1869,
"step": 3690
},
{
"epoch": 0.44492544492544495,
"grad_norm": 0.19809991121292114,
"learning_rate": 6.127517031706769e-05,
"loss": 1.1922,
"step": 3700
},
{
"epoch": 0.44612794612794615,
"grad_norm": 0.21065044403076172,
"learning_rate": 6.108535847736546e-05,
"loss": 1.1839,
"step": 3710
},
{
"epoch": 0.44733044733044736,
"grad_norm": 0.20427262783050537,
"learning_rate": 6.089537847386696e-05,
"loss": 1.1687,
"step": 3720
},
{
"epoch": 0.4485329485329485,
"grad_norm": 0.21668106317520142,
"learning_rate": 6.070523318855015e-05,
"loss": 1.1811,
"step": 3730
},
{
"epoch": 0.4497354497354497,
"grad_norm": 0.20621725916862488,
"learning_rate": 6.051492550590029e-05,
"loss": 1.1824,
"step": 3740
},
{
"epoch": 0.4509379509379509,
"grad_norm": 0.21493861079216003,
"learning_rate": 6.032445831286615e-05,
"loss": 1.1799,
"step": 3750
},
{
"epoch": 0.4521404521404521,
"grad_norm": 0.20860455930233002,
"learning_rate": 6.013383449881631e-05,
"loss": 1.1793,
"step": 3760
},
{
"epoch": 0.4533429533429533,
"grad_norm": 0.19611907005310059,
"learning_rate": 5.994305695549526e-05,
"loss": 1.1845,
"step": 3770
},
{
"epoch": 0.45454545454545453,
"grad_norm": 0.20090104639530182,
"learning_rate": 5.9752128576979514e-05,
"loss": 1.1689,
"step": 3780
},
{
"epoch": 0.45574795574795574,
"grad_norm": 0.22553130984306335,
"learning_rate": 5.9561052259633796e-05,
"loss": 1.191,
"step": 3790
},
{
"epoch": 0.45695045695045694,
"grad_norm": 0.20412489771842957,
"learning_rate": 5.936983090206701e-05,
"loss": 1.1689,
"step": 3800
},
{
"epoch": 0.45815295815295815,
"grad_norm": 0.20666392147541046,
"learning_rate": 5.917846740508832e-05,
"loss": 1.175,
"step": 3810
},
{
"epoch": 0.45935545935545935,
"grad_norm": 0.20357099175453186,
"learning_rate": 5.8986964671663115e-05,
"loss": 1.1599,
"step": 3820
},
{
"epoch": 0.46055796055796056,
"grad_norm": 0.20838573575019836,
"learning_rate": 5.8795325606869025e-05,
"loss": 1.1801,
"step": 3830
},
{
"epoch": 0.46176046176046176,
"grad_norm": 0.21168790757656097,
"learning_rate": 5.8603553117851753e-05,
"loss": 1.1941,
"step": 3840
},
{
"epoch": 0.46296296296296297,
"grad_norm": 0.20583811402320862,
"learning_rate": 5.8411650113781094e-05,
"loss": 1.1712,
"step": 3850
},
{
"epoch": 0.46416546416546417,
"grad_norm": 0.20696823298931122,
"learning_rate": 5.82196195058067e-05,
"loss": 1.1683,
"step": 3860
},
{
"epoch": 0.4653679653679654,
"grad_norm": 0.19953173398971558,
"learning_rate": 5.802746420701399e-05,
"loss": 1.1861,
"step": 3870
},
{
"epoch": 0.4665704665704666,
"grad_norm": 0.20347267389297485,
"learning_rate": 5.783518713237993e-05,
"loss": 1.1963,
"step": 3880
},
{
"epoch": 0.4677729677729678,
"grad_norm": 0.20178109407424927,
"learning_rate": 5.7642791198728795e-05,
"loss": 1.165,
"step": 3890
},
{
"epoch": 0.468975468975469,
"grad_norm": 0.20376752316951752,
"learning_rate": 5.745027932468794e-05,
"loss": 1.1659,
"step": 3900
},
{
"epoch": 0.4701779701779702,
"grad_norm": 0.2073439061641693,
"learning_rate": 5.725765443064355e-05,
"loss": 1.1892,
"step": 3910
},
{
"epoch": 0.4713804713804714,
"grad_norm": 0.19912444055080414,
"learning_rate": 5.7064919438696275e-05,
"loss": 1.1858,
"step": 3920
},
{
"epoch": 0.4725829725829726,
"grad_norm": 0.20428648591041565,
"learning_rate": 5.687207727261699e-05,
"loss": 1.1786,
"step": 3930
},
{
"epoch": 0.4737854737854738,
"grad_norm": 0.21749798953533173,
"learning_rate": 5.667913085780232e-05,
"loss": 1.1752,
"step": 3940
},
{
"epoch": 0.474987974987975,
"grad_norm": 0.2052384912967682,
"learning_rate": 5.648608312123039e-05,
"loss": 1.1752,
"step": 3950
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.20432449877262115,
"learning_rate": 5.629293699141638e-05,
"loss": 1.1871,
"step": 3960
},
{
"epoch": 0.47739297739297737,
"grad_norm": 0.19696982204914093,
"learning_rate": 5.609969539836801e-05,
"loss": 1.1618,
"step": 3970
},
{
"epoch": 0.4785954785954786,
"grad_norm": 0.21920330822467804,
"learning_rate": 5.5906361273541244e-05,
"loss": 1.1877,
"step": 3980
},
{
"epoch": 0.4797979797979798,
"grad_norm": 0.21005605161190033,
"learning_rate": 5.5712937549795704e-05,
"loss": 1.1934,
"step": 3990
},
{
"epoch": 0.481000481000481,
"grad_norm": 0.20428964495658875,
"learning_rate": 5.551942716135021e-05,
"loss": 1.1762,
"step": 4000
},
{
"epoch": 0.481000481000481,
"eval_loss": 1.1611641645431519,
"eval_runtime": 14.3743,
"eval_samples_per_second": 17.81,
"eval_steps_per_second": 0.557,
"step": 4000
},
{
"epoch": 0.4822029822029822,
"grad_norm": 0.20652806758880615,
"learning_rate": 5.532583304373833e-05,
"loss": 1.1826,
"step": 4010
},
{
"epoch": 0.4834054834054834,
"grad_norm": 0.2061479687690735,
"learning_rate": 5.513215813376373e-05,
"loss": 1.1844,
"step": 4020
},
{
"epoch": 0.4846079846079846,
"grad_norm": 0.2116565853357315,
"learning_rate": 5.493840536945574e-05,
"loss": 1.172,
"step": 4030
},
{
"epoch": 0.4858104858104858,
"grad_norm": 0.20410141348838806,
"learning_rate": 5.47445776900247e-05,
"loss": 1.1754,
"step": 4040
},
{
"epoch": 0.487012987012987,
"grad_norm": 0.20021560788154602,
"learning_rate": 5.4550678035817404e-05,
"loss": 1.1713,
"step": 4050
},
{
"epoch": 0.4882154882154882,
"grad_norm": 0.20905666053295135,
"learning_rate": 5.435670934827253e-05,
"loss": 1.1746,
"step": 4060
},
{
"epoch": 0.4894179894179894,
"grad_norm": 0.2209169864654541,
"learning_rate": 5.416267456987596e-05,
"loss": 1.1763,
"step": 4070
},
{
"epoch": 0.4906204906204906,
"grad_norm": 0.19423747062683105,
"learning_rate": 5.3968576644116153e-05,
"loss": 1.1691,
"step": 4080
},
{
"epoch": 0.49182299182299183,
"grad_norm": 0.19502019882202148,
"learning_rate": 5.377441851543954e-05,
"loss": 1.1726,
"step": 4090
},
{
"epoch": 0.49302549302549303,
"grad_norm": 0.20400142669677734,
"learning_rate": 5.358020312920582e-05,
"loss": 1.1736,
"step": 4100
},
{
"epoch": 0.49422799422799424,
"grad_norm": 0.2074715495109558,
"learning_rate": 5.338593343164324e-05,
"loss": 1.1811,
"step": 4110
},
{
"epoch": 0.49543049543049544,
"grad_norm": 0.20175635814666748,
"learning_rate": 5.319161236980402e-05,
"loss": 1.1653,
"step": 4120
},
{
"epoch": 0.49663299663299665,
"grad_norm": 0.20830634236335754,
"learning_rate": 5.299724289151949e-05,
"loss": 1.1735,
"step": 4130
},
{
"epoch": 0.49783549783549785,
"grad_norm": 0.19762635231018066,
"learning_rate": 5.280282794535552e-05,
"loss": 1.1528,
"step": 4140
},
{
"epoch": 0.49903799903799906,
"grad_norm": 0.22000741958618164,
"learning_rate": 5.260837048056768e-05,
"loss": 1.1677,
"step": 4150
},
{
"epoch": 0.5002405002405003,
"grad_norm": 0.19429528713226318,
"learning_rate": 5.241387344705655e-05,
"loss": 1.1617,
"step": 4160
},
{
"epoch": 0.5014430014430015,
"grad_norm": 0.20485764741897583,
"learning_rate": 5.2219339795322985e-05,
"loss": 1.1745,
"step": 4170
},
{
"epoch": 0.5026455026455027,
"grad_norm": 0.20905248820781708,
"learning_rate": 5.2024772476423324e-05,
"loss": 1.1609,
"step": 4180
},
{
"epoch": 0.5038480038480039,
"grad_norm": 0.2026577889919281,
"learning_rate": 5.183017444192462e-05,
"loss": 1.1693,
"step": 4190
},
{
"epoch": 0.5050505050505051,
"grad_norm": 0.20207685232162476,
"learning_rate": 5.16355486438599e-05,
"loss": 1.1686,
"step": 4200
},
{
"epoch": 0.5062530062530063,
"grad_norm": 0.1890622228384018,
"learning_rate": 5.144089803468333e-05,
"loss": 1.1672,
"step": 4210
},
{
"epoch": 0.5074555074555075,
"grad_norm": 0.19660572707653046,
"learning_rate": 5.1246225567225506e-05,
"loss": 1.1672,
"step": 4220
},
{
"epoch": 0.5086580086580087,
"grad_norm": 0.20353861153125763,
"learning_rate": 5.1051534194648575e-05,
"loss": 1.1759,
"step": 4230
},
{
"epoch": 0.5098605098605099,
"grad_norm": 0.20563137531280518,
"learning_rate": 5.085682687040148e-05,
"loss": 1.1745,
"step": 4240
},
{
"epoch": 0.5110630110630111,
"grad_norm": 0.20082563161849976,
"learning_rate": 5.066210654817514e-05,
"loss": 1.1726,
"step": 4250
},
{
"epoch": 0.5122655122655123,
"grad_norm": 0.20972830057144165,
"learning_rate": 5.0467376181857694e-05,
"loss": 1.1588,
"step": 4260
},
{
"epoch": 0.5134680134680135,
"grad_norm": 0.21431012451648712,
"learning_rate": 5.0272638725489595e-05,
"loss": 1.159,
"step": 4270
},
{
"epoch": 0.5146705146705147,
"grad_norm": 0.20237547159194946,
"learning_rate": 5.0077897133218884e-05,
"loss": 1.1649,
"step": 4280
},
{
"epoch": 0.5158730158730159,
"grad_norm": 0.19756945967674255,
"learning_rate": 4.988315435925636e-05,
"loss": 1.1692,
"step": 4290
},
{
"epoch": 0.517075517075517,
"grad_norm": 0.2220674604177475,
"learning_rate": 4.968841335783066e-05,
"loss": 1.1713,
"step": 4300
},
{
"epoch": 0.5182780182780182,
"grad_norm": 0.20141322910785675,
"learning_rate": 4.949367708314365e-05,
"loss": 1.1604,
"step": 4310
},
{
"epoch": 0.5194805194805194,
"grad_norm": 0.2010202258825302,
"learning_rate": 4.929894848932541e-05,
"loss": 1.1758,
"step": 4320
},
{
"epoch": 0.5206830206830206,
"grad_norm": 0.20987989008426666,
"learning_rate": 4.910423053038955e-05,
"loss": 1.1618,
"step": 4330
},
{
"epoch": 0.5218855218855218,
"grad_norm": 0.20549160242080688,
"learning_rate": 4.890952616018831e-05,
"loss": 1.1745,
"step": 4340
},
{
"epoch": 0.523088023088023,
"grad_norm": 0.20881348848342896,
"learning_rate": 4.871483833236782e-05,
"loss": 1.1615,
"step": 4350
},
{
"epoch": 0.5242905242905243,
"grad_norm": 0.19820332527160645,
"learning_rate": 4.852017000032322e-05,
"loss": 1.1626,
"step": 4360
},
{
"epoch": 0.5254930254930255,
"grad_norm": 0.20698867738246918,
"learning_rate": 4.8325524117154003e-05,
"loss": 1.1647,
"step": 4370
},
{
"epoch": 0.5266955266955267,
"grad_norm": 0.1959027200937271,
"learning_rate": 4.813090363561896e-05,
"loss": 1.1777,
"step": 4380
},
{
"epoch": 0.5278980278980279,
"grad_norm": 0.19776858389377594,
"learning_rate": 4.79363115080917e-05,
"loss": 1.1574,
"step": 4390
},
{
"epoch": 0.5291005291005291,
"grad_norm": 0.20591264963150024,
"learning_rate": 4.774175068651558e-05,
"loss": 1.1642,
"step": 4400
},
{
"epoch": 0.5303030303030303,
"grad_norm": 0.20426321029663086,
"learning_rate": 4.7547224122359144e-05,
"loss": 1.1537,
"step": 4410
},
{
"epoch": 0.5315055315055315,
"grad_norm": 0.1917085498571396,
"learning_rate": 4.735273476657116e-05,
"loss": 1.1725,
"step": 4420
},
{
"epoch": 0.5327080327080327,
"grad_norm": 0.21214643120765686,
"learning_rate": 4.715828556953605e-05,
"loss": 1.1736,
"step": 4430
},
{
"epoch": 0.5339105339105339,
"grad_norm": 0.20908360183238983,
"learning_rate": 4.696387948102893e-05,
"loss": 1.1641,
"step": 4440
},
{
"epoch": 0.5351130351130351,
"grad_norm": 0.2076905071735382,
"learning_rate": 4.6769519450171074e-05,
"loss": 1.1695,
"step": 4450
},
{
"epoch": 0.5363155363155363,
"grad_norm": 0.212940976023674,
"learning_rate": 4.657520842538494e-05,
"loss": 1.1578,
"step": 4460
},
{
"epoch": 0.5375180375180375,
"grad_norm": 0.20305852591991425,
"learning_rate": 4.63809493543497e-05,
"loss": 1.1774,
"step": 4470
},
{
"epoch": 0.5387205387205387,
"grad_norm": 0.20637470483779907,
"learning_rate": 4.618674518395628e-05,
"loss": 1.1579,
"step": 4480
},
{
"epoch": 0.5399230399230399,
"grad_norm": 0.20714348554611206,
"learning_rate": 4.599259886026284e-05,
"loss": 1.1549,
"step": 4490
},
{
"epoch": 0.5411255411255411,
"grad_norm": 0.20754988491535187,
"learning_rate": 4.5798513328449976e-05,
"loss": 1.1563,
"step": 4500
},
{
"epoch": 0.5423280423280423,
"grad_norm": 0.20212216675281525,
"learning_rate": 4.560449153277611e-05,
"loss": 1.1645,
"step": 4510
},
{
"epoch": 0.5435305435305435,
"grad_norm": 0.1956162005662918,
"learning_rate": 4.541053641653274e-05,
"loss": 1.1674,
"step": 4520
},
{
"epoch": 0.5447330447330447,
"grad_norm": 0.19848354160785675,
"learning_rate": 4.521665092199991e-05,
"loss": 1.1677,
"step": 4530
},
{
"epoch": 0.5459355459355459,
"grad_norm": 0.19816353917121887,
"learning_rate": 4.502283799040145e-05,
"loss": 1.1626,
"step": 4540
},
{
"epoch": 0.5471380471380471,
"grad_norm": 0.20949548482894897,
"learning_rate": 4.482910056186047e-05,
"loss": 1.1752,
"step": 4550
},
{
"epoch": 0.5483405483405484,
"grad_norm": 0.20675860345363617,
"learning_rate": 4.463544157535464e-05,
"loss": 1.1584,
"step": 4560
},
{
"epoch": 0.5495430495430496,
"grad_norm": 0.2039814293384552,
"learning_rate": 4.444186396867175e-05,
"loss": 1.1671,
"step": 4570
},
{
"epoch": 0.5507455507455508,
"grad_norm": 0.2231522798538208,
"learning_rate": 4.4248370678365e-05,
"loss": 1.1648,
"step": 4580
},
{
"epoch": 0.551948051948052,
"grad_norm": 0.2013349086046219,
"learning_rate": 4.4054964639708554e-05,
"loss": 1.1764,
"step": 4590
},
{
"epoch": 0.5531505531505532,
"grad_norm": 0.20852096378803253,
"learning_rate": 4.386164878665297e-05,
"loss": 1.1717,
"step": 4600
},
{
"epoch": 0.5543530543530544,
"grad_norm": 0.21214091777801514,
"learning_rate": 4.3668426051780704e-05,
"loss": 1.1644,
"step": 4610
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.215733602643013,
"learning_rate": 4.3475299366261604e-05,
"loss": 1.1631,
"step": 4620
},
{
"epoch": 0.5567580567580568,
"grad_norm": 0.20751333236694336,
"learning_rate": 4.328227165980847e-05,
"loss": 1.1545,
"step": 4630
},
{
"epoch": 0.557960557960558,
"grad_norm": 0.1947864145040512,
"learning_rate": 4.3089345860632594e-05,
"loss": 1.1601,
"step": 4640
},
{
"epoch": 0.5591630591630592,
"grad_norm": 0.20173963904380798,
"learning_rate": 4.289652489539937e-05,
"loss": 1.1521,
"step": 4650
},
{
"epoch": 0.5603655603655604,
"grad_norm": 0.19809898734092712,
"learning_rate": 4.2703811689183797e-05,
"loss": 1.1608,
"step": 4660
},
{
"epoch": 0.5615680615680616,
"grad_norm": 0.2071085423231125,
"learning_rate": 4.2511209165426306e-05,
"loss": 1.1748,
"step": 4670
},
{
"epoch": 0.5627705627705628,
"grad_norm": 0.1951216459274292,
"learning_rate": 4.2318720245888135e-05,
"loss": 1.1685,
"step": 4680
},
{
"epoch": 0.563973063973064,
"grad_norm": 0.1975964456796646,
"learning_rate": 4.212634785060728e-05,
"loss": 1.1637,
"step": 4690
},
{
"epoch": 0.5651755651755652,
"grad_norm": 0.2012360543012619,
"learning_rate": 4.193409489785403e-05,
"loss": 1.1811,
"step": 4700
},
{
"epoch": 0.5663780663780664,
"grad_norm": 0.20509780943393707,
"learning_rate": 4.174196430408673e-05,
"loss": 1.1719,
"step": 4710
},
{
"epoch": 0.5675805675805676,
"grad_norm": 0.19332098960876465,
"learning_rate": 4.1549958983907555e-05,
"loss": 1.1605,
"step": 4720
},
{
"epoch": 0.5687830687830688,
"grad_norm": 0.20153947174549103,
"learning_rate": 4.1358081850018326e-05,
"loss": 1.1546,
"step": 4730
},
{
"epoch": 0.56998556998557,
"grad_norm": 0.2031155526638031,
"learning_rate": 4.116633581317623e-05,
"loss": 1.1708,
"step": 4740
},
{
"epoch": 0.5711880711880711,
"grad_norm": 0.1971137523651123,
"learning_rate": 4.097472378214981e-05,
"loss": 1.1527,
"step": 4750
},
{
"epoch": 0.5723905723905723,
"grad_norm": 0.20589309930801392,
"learning_rate": 4.0783248663674645e-05,
"loss": 1.1551,
"step": 4760
},
{
"epoch": 0.5735930735930735,
"grad_norm": 0.21493607759475708,
"learning_rate": 4.059191336240947e-05,
"loss": 1.166,
"step": 4770
},
{
"epoch": 0.5747955747955747,
"grad_norm": 0.20907354354858398,
"learning_rate": 4.0400720780891935e-05,
"loss": 1.1576,
"step": 4780
},
{
"epoch": 0.575998075998076,
"grad_norm": 0.20002475380897522,
"learning_rate": 4.0209673819494695e-05,
"loss": 1.1631,
"step": 4790
},
{
"epoch": 0.5772005772005772,
"grad_norm": 0.1972057819366455,
"learning_rate": 4.00187753763813e-05,
"loss": 1.1631,
"step": 4800
},
{
"epoch": 0.5784030784030784,
"grad_norm": 0.2116405963897705,
"learning_rate": 3.982802834746236e-05,
"loss": 1.1727,
"step": 4810
},
{
"epoch": 0.5796055796055796,
"grad_norm": 0.19388829171657562,
"learning_rate": 3.963743562635149e-05,
"loss": 1.1569,
"step": 4820
},
{
"epoch": 0.5808080808080808,
"grad_norm": 0.2148236185312271,
"learning_rate": 3.9447000104321544e-05,
"loss": 1.1676,
"step": 4830
},
{
"epoch": 0.582010582010582,
"grad_norm": 0.20140714943408966,
"learning_rate": 3.925672467026057e-05,
"loss": 1.1517,
"step": 4840
},
{
"epoch": 0.5832130832130832,
"grad_norm": 0.20167213678359985,
"learning_rate": 3.9066612210628216e-05,
"loss": 1.1651,
"step": 4850
},
{
"epoch": 0.5844155844155844,
"grad_norm": 0.20786848664283752,
"learning_rate": 3.887666560941174e-05,
"loss": 1.1512,
"step": 4860
},
{
"epoch": 0.5856180856180856,
"grad_norm": 0.19768615067005157,
"learning_rate": 3.868688774808239e-05,
"loss": 1.1676,
"step": 4870
},
{
"epoch": 0.5868205868205868,
"grad_norm": 0.21151971817016602,
"learning_rate": 3.849728150555161e-05,
"loss": 1.1466,
"step": 4880
},
{
"epoch": 0.588023088023088,
"grad_norm": 0.21175932884216309,
"learning_rate": 3.8307849758127454e-05,
"loss": 1.1508,
"step": 4890
},
{
"epoch": 0.5892255892255892,
"grad_norm": 0.20844349265098572,
"learning_rate": 3.8118595379470826e-05,
"loss": 1.1757,
"step": 4900
},
{
"epoch": 0.5904280904280904,
"grad_norm": 0.19040937721729279,
"learning_rate": 3.7929521240552066e-05,
"loss": 1.1708,
"step": 4910
},
{
"epoch": 0.5916305916305916,
"grad_norm": 0.20293647050857544,
"learning_rate": 3.77406302096072e-05,
"loss": 1.1553,
"step": 4920
},
{
"epoch": 0.5928330928330928,
"grad_norm": 0.21380431950092316,
"learning_rate": 3.755192515209458e-05,
"loss": 1.1618,
"step": 4930
},
{
"epoch": 0.594035594035594,
"grad_norm": 0.2078305035829544,
"learning_rate": 3.7363408930651326e-05,
"loss": 1.1498,
"step": 4940
},
{
"epoch": 0.5952380952380952,
"grad_norm": 0.19942502677440643,
"learning_rate": 3.7175084405049975e-05,
"loss": 1.1555,
"step": 4950
},
{
"epoch": 0.5964405964405964,
"grad_norm": 0.2114267349243164,
"learning_rate": 3.6986954432155e-05,
"loss": 1.1674,
"step": 4960
},
{
"epoch": 0.5976430976430976,
"grad_norm": 0.19917641580104828,
"learning_rate": 3.679902186587959e-05,
"loss": 1.1687,
"step": 4970
},
{
"epoch": 0.5988455988455988,
"grad_norm": 0.2174818068742752,
"learning_rate": 3.6611289557142226e-05,
"loss": 1.1681,
"step": 4980
},
{
"epoch": 0.6000481000481,
"grad_norm": 0.193511962890625,
"learning_rate": 3.642376035382359e-05,
"loss": 1.1653,
"step": 4990
},
{
"epoch": 0.6012506012506013,
"grad_norm": 0.2069484144449234,
"learning_rate": 3.6236437100723194e-05,
"loss": 1.1723,
"step": 5000
},
{
"epoch": 0.6012506012506013,
"eval_loss": 1.1470744609832764,
"eval_runtime": 14.362,
"eval_samples_per_second": 17.825,
"eval_steps_per_second": 0.557,
"step": 5000
}
],
"logging_steps": 10,
"max_steps": 8316,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.858009161721446e+19,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}