{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 5346,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.014029180695847363,
"grad_norm": 2.10097336769104,
"learning_rate": 0.00019928852274854898,
"loss": 1.882,
"step": 25
},
{
"epoch": 0.028058361391694726,
"grad_norm": 1.7152155637741089,
"learning_rate": 0.00019835236847032392,
"loss": 1.9549,
"step": 50
},
{
"epoch": 0.04208754208754209,
"grad_norm": 1.2712793350219727,
"learning_rate": 0.00019741621419209885,
"loss": 2.0645,
"step": 75
},
{
"epoch": 0.05611672278338945,
"grad_norm": 1.8182361125946045,
"learning_rate": 0.00019648005991387382,
"loss": 2.0212,
"step": 100
},
{
"epoch": 0.07014590347923681,
"grad_norm": 2.1019437313079834,
"learning_rate": 0.00019554390563564878,
"loss": 1.866,
"step": 125
},
{
"epoch": 0.08417508417508418,
"grad_norm": 1.8642802238464355,
"learning_rate": 0.0001946077513574237,
"loss": 1.7157,
"step": 150
},
{
"epoch": 0.09820426487093153,
"grad_norm": 2.941638946533203,
"learning_rate": 0.00019367159707919866,
"loss": 1.9151,
"step": 175
},
{
"epoch": 0.1122334455667789,
"grad_norm": 2.4754459857940674,
"learning_rate": 0.00019273544280097363,
"loss": 1.8758,
"step": 200
},
{
"epoch": 0.12626262626262627,
"grad_norm": 2.7489519119262695,
"learning_rate": 0.00019179928852274856,
"loss": 1.8385,
"step": 225
},
{
"epoch": 0.14029180695847362,
"grad_norm": 1.8580266237258911,
"learning_rate": 0.0001908631342445235,
"loss": 1.8651,
"step": 250
},
{
"epoch": 0.15432098765432098,
"grad_norm": 2.3558552265167236,
"learning_rate": 0.00018992697996629844,
"loss": 1.7925,
"step": 275
},
{
"epoch": 0.16835016835016836,
"grad_norm": 4.575771331787109,
"learning_rate": 0.0001889908256880734,
"loss": 1.7716,
"step": 300
},
{
"epoch": 0.18237934904601572,
"grad_norm": 3.215369701385498,
"learning_rate": 0.00018805467140984837,
"loss": 1.9096,
"step": 325
},
{
"epoch": 0.19640852974186307,
"grad_norm": 1.1048444509506226,
"learning_rate": 0.00018711851713162328,
"loss": 1.7396,
"step": 350
},
{
"epoch": 0.21043771043771045,
"grad_norm": 3.72884202003479,
"learning_rate": 0.00018618236285339825,
"loss": 1.9161,
"step": 375
},
{
"epoch": 0.2244668911335578,
"grad_norm": 2.643915891647339,
"learning_rate": 0.00018524620857517321,
"loss": 2.2453,
"step": 400
},
{
"epoch": 0.23849607182940516,
"grad_norm": 2.8237082958221436,
"learning_rate": 0.00018431005429694815,
"loss": 2.4445,
"step": 425
},
{
"epoch": 0.25252525252525254,
"grad_norm": 1.8723883628845215,
"learning_rate": 0.0001833739000187231,
"loss": 2.4039,
"step": 450
},
{
"epoch": 0.2665544332210999,
"grad_norm": 1.9935405254364014,
"learning_rate": 0.00018243774574049803,
"loss": 2.3307,
"step": 475
},
{
"epoch": 0.28058361391694725,
"grad_norm": 1.7783311605453491,
"learning_rate": 0.000181501591462273,
"loss": 2.4179,
"step": 500
},
{
"epoch": 0.2946127946127946,
"grad_norm": 1.417754054069519,
"learning_rate": 0.00018056543718404796,
"loss": 2.3088,
"step": 525
},
{
"epoch": 0.30864197530864196,
"grad_norm": 2.444193124771118,
"learning_rate": 0.00017962928290582287,
"loss": 2.4227,
"step": 550
},
{
"epoch": 0.3226711560044893,
"grad_norm": 2.255748987197876,
"learning_rate": 0.00017869312862759784,
"loss": 2.3584,
"step": 575
},
{
"epoch": 0.3367003367003367,
"grad_norm": 2.1884357929229736,
"learning_rate": 0.0001777569743493728,
"loss": 2.1804,
"step": 600
},
{
"epoch": 0.3507295173961841,
"grad_norm": 1.6140755414962769,
"learning_rate": 0.00017682082007114774,
"loss": 2.4004,
"step": 625
},
{
"epoch": 0.36475869809203143,
"grad_norm": 1.222005844116211,
"learning_rate": 0.00017588466579292268,
"loss": 2.41,
"step": 650
},
{
"epoch": 0.3787878787878788,
"grad_norm": 2.1180336475372314,
"learning_rate": 0.00017494851151469762,
"loss": 2.3695,
"step": 675
},
{
"epoch": 0.39281705948372614,
"grad_norm": 2.320734739303589,
"learning_rate": 0.00017401235723647258,
"loss": 2.4031,
"step": 700
},
{
"epoch": 0.4068462401795735,
"grad_norm": 1.6662636995315552,
"learning_rate": 0.00017307620295824755,
"loss": 2.4192,
"step": 725
},
{
"epoch": 0.4208754208754209,
"grad_norm": 1.4911861419677734,
"learning_rate": 0.00017214004868002246,
"loss": 2.3,
"step": 750
},
{
"epoch": 0.43490460157126826,
"grad_norm": 1.3176157474517822,
"learning_rate": 0.00017120389440179742,
"loss": 2.3768,
"step": 775
},
{
"epoch": 0.4489337822671156,
"grad_norm": 1.7129669189453125,
"learning_rate": 0.0001702677401235724,
"loss": 2.4815,
"step": 800
},
{
"epoch": 0.46296296296296297,
"grad_norm": 1.7648168802261353,
"learning_rate": 0.00016933158584534733,
"loss": 2.4509,
"step": 825
},
{
"epoch": 0.4769921436588103,
"grad_norm": 1.7576874494552612,
"learning_rate": 0.00016839543156712227,
"loss": 2.4697,
"step": 850
},
{
"epoch": 0.4910213243546577,
"grad_norm": 2.3778634071350098,
"learning_rate": 0.0001674592772888972,
"loss": 2.416,
"step": 875
},
{
"epoch": 0.5050505050505051,
"grad_norm": 2.6142146587371826,
"learning_rate": 0.00016652312301067217,
"loss": 2.3169,
"step": 900
},
{
"epoch": 0.5190796857463524,
"grad_norm": 1.3990776538848877,
"learning_rate": 0.00016558696873244713,
"loss": 2.3465,
"step": 925
},
{
"epoch": 0.5331088664421998,
"grad_norm": 1.65084707736969,
"learning_rate": 0.00016465081445422205,
"loss": 2.3098,
"step": 950
},
{
"epoch": 0.5471380471380471,
"grad_norm": 1.5017579793930054,
"learning_rate": 0.000163714660175997,
"loss": 2.3946,
"step": 975
},
{
"epoch": 0.5611672278338945,
"grad_norm": 1.3995898962020874,
"learning_rate": 0.00016277850589777198,
"loss": 2.3814,
"step": 1000
},
{
"epoch": 0.5751964085297419,
"grad_norm": 1.5977699756622314,
"learning_rate": 0.00016184235161954691,
"loss": 2.2956,
"step": 1025
},
{
"epoch": 0.5892255892255892,
"grad_norm": 1.5083733797073364,
"learning_rate": 0.00016090619734132185,
"loss": 2.4089,
"step": 1050
},
{
"epoch": 0.6032547699214366,
"grad_norm": 1.9143089056015015,
"learning_rate": 0.0001599700430630968,
"loss": 2.511,
"step": 1075
},
{
"epoch": 0.6172839506172839,
"grad_norm": 1.4765998125076294,
"learning_rate": 0.00015903388878487176,
"loss": 2.4673,
"step": 1100
},
{
"epoch": 0.6313131313131313,
"grad_norm": 1.5570839643478394,
"learning_rate": 0.00015809773450664672,
"loss": 2.4941,
"step": 1125
},
{
"epoch": 0.6453423120089786,
"grad_norm": 1.695884346961975,
"learning_rate": 0.00015716158022842163,
"loss": 2.54,
"step": 1150
},
{
"epoch": 0.6593714927048261,
"grad_norm": 1.961042881011963,
"learning_rate": 0.0001562254259501966,
"loss": 2.4232,
"step": 1175
},
{
"epoch": 0.6734006734006734,
"grad_norm": 1.2104586362838745,
"learning_rate": 0.00015528927167197156,
"loss": 2.4932,
"step": 1200
},
{
"epoch": 0.6874298540965208,
"grad_norm": 2.0136559009552,
"learning_rate": 0.0001543531173937465,
"loss": 2.5531,
"step": 1225
},
{
"epoch": 0.7014590347923682,
"grad_norm": 1.224069595336914,
"learning_rate": 0.00015341696311552144,
"loss": 2.4507,
"step": 1250
},
{
"epoch": 0.7154882154882155,
"grad_norm": 2.973175287246704,
"learning_rate": 0.00015248080883729638,
"loss": 2.4473,
"step": 1275
},
{
"epoch": 0.7295173961840629,
"grad_norm": 1.4036904573440552,
"learning_rate": 0.00015154465455907134,
"loss": 2.4077,
"step": 1300
},
{
"epoch": 0.7435465768799102,
"grad_norm": 1.9783000946044922,
"learning_rate": 0.0001506085002808463,
"loss": 2.5539,
"step": 1325
},
{
"epoch": 0.7575757575757576,
"grad_norm": 1.4202359914779663,
"learning_rate": 0.00014967234600262122,
"loss": 2.4247,
"step": 1350
},
{
"epoch": 0.7716049382716049,
"grad_norm": 2.6062519550323486,
"learning_rate": 0.00014873619172439619,
"loss": 2.5282,
"step": 1375
},
{
"epoch": 0.7856341189674523,
"grad_norm": 1.368980884552002,
"learning_rate": 0.00014780003744617115,
"loss": 2.3536,
"step": 1400
},
{
"epoch": 0.7996632996632996,
"grad_norm": 1.3597590923309326,
"learning_rate": 0.0001468638831679461,
"loss": 2.4658,
"step": 1425
},
{
"epoch": 0.813692480359147,
"grad_norm": 1.845139741897583,
"learning_rate": 0.00014592772888972103,
"loss": 2.38,
"step": 1450
},
{
"epoch": 0.8277216610549943,
"grad_norm": 1.7318049669265747,
"learning_rate": 0.00014499157461149597,
"loss": 2.2395,
"step": 1475
},
{
"epoch": 0.8417508417508418,
"grad_norm": 1.1624616384506226,
"learning_rate": 0.00014405542033327093,
"loss": 2.5868,
"step": 1500
},
{
"epoch": 0.8557800224466892,
"grad_norm": 2.0583107471466064,
"learning_rate": 0.0001431192660550459,
"loss": 2.7289,
"step": 1525
},
{
"epoch": 0.8698092031425365,
"grad_norm": 1.7552924156188965,
"learning_rate": 0.0001421831117768208,
"loss": 2.5651,
"step": 1550
},
{
"epoch": 0.8838383838383839,
"grad_norm": 2.457676887512207,
"learning_rate": 0.00014124695749859577,
"loss": 2.6289,
"step": 1575
},
{
"epoch": 0.8978675645342312,
"grad_norm": 1.9127784967422485,
"learning_rate": 0.00014031080322037074,
"loss": 2.4841,
"step": 1600
},
{
"epoch": 0.9118967452300786,
"grad_norm": 1.530076503753662,
"learning_rate": 0.00013937464894214568,
"loss": 2.5304,
"step": 1625
},
{
"epoch": 0.9259259259259259,
"grad_norm": 1.9689760208129883,
"learning_rate": 0.00013843849466392062,
"loss": 2.4602,
"step": 1650
},
{
"epoch": 0.9399551066217733,
"grad_norm": 1.9776215553283691,
"learning_rate": 0.00013750234038569555,
"loss": 2.6117,
"step": 1675
},
{
"epoch": 0.9539842873176206,
"grad_norm": 1.14767587184906,
"learning_rate": 0.00013656618610747052,
"loss": 2.5325,
"step": 1700
},
{
"epoch": 0.968013468013468,
"grad_norm": 1.2019888162612915,
"learning_rate": 0.00013563003182924548,
"loss": 2.4043,
"step": 1725
},
{
"epoch": 0.9820426487093153,
"grad_norm": 2.406468391418457,
"learning_rate": 0.0001346938775510204,
"loss": 2.5213,
"step": 1750
},
{
"epoch": 0.9960718294051627,
"grad_norm": 1.324762225151062,
"learning_rate": 0.00013375772327279536,
"loss": 2.3417,
"step": 1775
},
{
"epoch": 1.0101010101010102,
"grad_norm": 2.2722926139831543,
"learning_rate": 0.00013282156899457033,
"loss": 2.2183,
"step": 1800
},
{
"epoch": 1.0241301907968574,
"grad_norm": 2.4847495555877686,
"learning_rate": 0.00013188541471634526,
"loss": 1.8048,
"step": 1825
},
{
"epoch": 1.0381593714927049,
"grad_norm": 1.7608667612075806,
"learning_rate": 0.0001309492604381202,
"loss": 1.9154,
"step": 1850
},
{
"epoch": 1.0521885521885521,
"grad_norm": 1.1851481199264526,
"learning_rate": 0.00013001310615989514,
"loss": 2.073,
"step": 1875
},
{
"epoch": 1.0662177328843996,
"grad_norm": 1.8013004064559937,
"learning_rate": 0.0001290769518816701,
"loss": 1.8506,
"step": 1900
},
{
"epoch": 1.0802469135802468,
"grad_norm": 2.466134786605835,
"learning_rate": 0.00012814079760344507,
"loss": 1.9893,
"step": 1925
},
{
"epoch": 1.0942760942760943,
"grad_norm": 2.324709892272949,
"learning_rate": 0.00012720464332521998,
"loss": 1.7583,
"step": 1950
},
{
"epoch": 1.1083052749719418,
"grad_norm": 2.0219881534576416,
"learning_rate": 0.00012626848904699495,
"loss": 1.8451,
"step": 1975
},
{
"epoch": 1.122334455667789,
"grad_norm": 3.023254156112671,
"learning_rate": 0.00012533233476876991,
"loss": 1.991,
"step": 2000
},
{
"epoch": 1.1363636363636362,
"grad_norm": 3.2770721912384033,
"learning_rate": 0.00012439618049054485,
"loss": 1.9296,
"step": 2025
},
{
"epoch": 1.1503928170594837,
"grad_norm": 2.616680860519409,
"learning_rate": 0.0001234600262123198,
"loss": 2.0268,
"step": 2050
},
{
"epoch": 1.1644219977553312,
"grad_norm": 2.751861572265625,
"learning_rate": 0.00012252387193409473,
"loss": 2.0529,
"step": 2075
},
{
"epoch": 1.1784511784511784,
"grad_norm": 2.038623571395874,
"learning_rate": 0.0001215877176558697,
"loss": 1.8629,
"step": 2100
},
{
"epoch": 1.1924803591470259,
"grad_norm": 3.2588765621185303,
"learning_rate": 0.00012065156337764465,
"loss": 1.8359,
"step": 2125
},
{
"epoch": 1.2065095398428731,
"grad_norm": 2.306049346923828,
"learning_rate": 0.00011971540909941958,
"loss": 2.1057,
"step": 2150
},
{
"epoch": 1.2205387205387206,
"grad_norm": 2.9211647510528564,
"learning_rate": 0.00011877925482119454,
"loss": 1.8737,
"step": 2175
},
{
"epoch": 1.2345679012345678,
"grad_norm": 2.019728183746338,
"learning_rate": 0.0001178431005429695,
"loss": 1.946,
"step": 2200
},
{
"epoch": 1.2485970819304153,
"grad_norm": 2.480238437652588,
"learning_rate": 0.00011690694626474443,
"loss": 2.0493,
"step": 2225
},
{
"epoch": 1.2626262626262625,
"grad_norm": 2.5714902877807617,
"learning_rate": 0.00011597079198651939,
"loss": 2.0727,
"step": 2250
},
{
"epoch": 1.27665544332211,
"grad_norm": 3.412736177444458,
"learning_rate": 0.00011503463770829433,
"loss": 1.8885,
"step": 2275
},
{
"epoch": 1.2906846240179575,
"grad_norm": 2.0062901973724365,
"learning_rate": 0.00011409848343006928,
"loss": 1.8365,
"step": 2300
},
{
"epoch": 1.3047138047138047,
"grad_norm": 3.08791184425354,
"learning_rate": 0.00011316232915184423,
"loss": 1.9038,
"step": 2325
},
{
"epoch": 1.318742985409652,
"grad_norm": 2.814711332321167,
"learning_rate": 0.00011222617487361917,
"loss": 1.8955,
"step": 2350
},
{
"epoch": 1.3327721661054994,
"grad_norm": 3.6443734169006348,
"learning_rate": 0.00011129002059539412,
"loss": 1.781,
"step": 2375
},
{
"epoch": 1.3468013468013469,
"grad_norm": 1.6001911163330078,
"learning_rate": 0.00011035386631716909,
"loss": 1.9867,
"step": 2400
},
{
"epoch": 1.3608305274971941,
"grad_norm": 0.6663380265235901,
"learning_rate": 0.00010941771203894401,
"loss": 1.9535,
"step": 2425
},
{
"epoch": 1.3748597081930416,
"grad_norm": 1.7926398515701294,
"learning_rate": 0.00010848155776071898,
"loss": 1.8982,
"step": 2450
},
{
"epoch": 1.3888888888888888,
"grad_norm": 2.20592999458313,
"learning_rate": 0.00010754540348249392,
"loss": 1.909,
"step": 2475
},
{
"epoch": 1.4029180695847363,
"grad_norm": 2.376415491104126,
"learning_rate": 0.00010660924920426887,
"loss": 1.7022,
"step": 2500
},
{
"epoch": 1.4169472502805835,
"grad_norm": 2.5050957202911377,
"learning_rate": 0.00010567309492604382,
"loss": 1.7764,
"step": 2525
},
{
"epoch": 1.430976430976431,
"grad_norm": 2.35119366645813,
"learning_rate": 0.00010473694064781876,
"loss": 1.925,
"step": 2550
},
{
"epoch": 1.4450056116722783,
"grad_norm": 3.448140859603882,
"learning_rate": 0.00010380078636959371,
"loss": 1.8381,
"step": 2575
},
{
"epoch": 1.4590347923681257,
"grad_norm": 2.3299753665924072,
"learning_rate": 0.00010286463209136868,
"loss": 2.0923,
"step": 2600
},
{
"epoch": 1.4730639730639732,
"grad_norm": 2.122396945953369,
"learning_rate": 0.0001019284778131436,
"loss": 1.8968,
"step": 2625
},
{
"epoch": 1.4870931537598204,
"grad_norm": 2.1543469429016113,
"learning_rate": 0.00010099232353491857,
"loss": 1.9071,
"step": 2650
},
{
"epoch": 1.5011223344556677,
"grad_norm": 1.571052074432373,
"learning_rate": 0.0001000561692566935,
"loss": 1.9378,
"step": 2675
},
{
"epoch": 1.5151515151515151,
"grad_norm": 1.9749538898468018,
"learning_rate": 9.912001497846846e-05,
"loss": 2.0446,
"step": 2700
},
{
"epoch": 1.5291806958473626,
"grad_norm": 2.5454633235931396,
"learning_rate": 9.81838607002434e-05,
"loss": 1.9472,
"step": 2725
},
{
"epoch": 1.5432098765432098,
"grad_norm": 2.261444091796875,
"learning_rate": 9.724770642201836e-05,
"loss": 1.97,
"step": 2750
},
{
"epoch": 1.557239057239057,
"grad_norm": 1.7348928451538086,
"learning_rate": 9.63115521437933e-05,
"loss": 2.0048,
"step": 2775
},
{
"epoch": 1.5712682379349046,
"grad_norm": 3.484074831008911,
"learning_rate": 9.537539786556825e-05,
"loss": 1.9444,
"step": 2800
},
{
"epoch": 1.585297418630752,
"grad_norm": 3.5576932430267334,
"learning_rate": 9.443924358734319e-05,
"loss": 1.9316,
"step": 2825
},
{
"epoch": 1.5993265993265995,
"grad_norm": 3.098278522491455,
"learning_rate": 9.350308930911815e-05,
"loss": 1.9792,
"step": 2850
},
{
"epoch": 1.6133557800224467,
"grad_norm": 1.933929681777954,
"learning_rate": 9.256693503089309e-05,
"loss": 1.8876,
"step": 2875
},
{
"epoch": 1.627384960718294,
"grad_norm": 1.292943000793457,
"learning_rate": 9.163078075266804e-05,
"loss": 2.0678,
"step": 2900
},
{
"epoch": 1.6414141414141414,
"grad_norm": 3.300262928009033,
"learning_rate": 9.069462647444298e-05,
"loss": 2.0715,
"step": 2925
},
{
"epoch": 1.655443322109989,
"grad_norm": 1.1303402185440063,
"learning_rate": 8.975847219621795e-05,
"loss": 1.8957,
"step": 2950
},
{
"epoch": 1.6694725028058361,
"grad_norm": 2.505016803741455,
"learning_rate": 8.882231791799289e-05,
"loss": 1.9968,
"step": 2975
},
{
"epoch": 1.6835016835016834,
"grad_norm": 2.983059883117676,
"learning_rate": 8.788616363976784e-05,
"loss": 1.9622,
"step": 3000
},
{
"epoch": 1.6975308641975309,
"grad_norm": 2.4133288860321045,
"learning_rate": 8.695000936154279e-05,
"loss": 1.8102,
"step": 3025
},
{
"epoch": 1.7115600448933783,
"grad_norm": 2.2003655433654785,
"learning_rate": 8.601385508331774e-05,
"loss": 1.8454,
"step": 3050
},
{
"epoch": 1.7255892255892256,
"grad_norm": 2.353689193725586,
"learning_rate": 8.507770080509268e-05,
"loss": 1.9382,
"step": 3075
},
{
"epoch": 1.7396184062850728,
"grad_norm": 3.0705206394195557,
"learning_rate": 8.414154652686763e-05,
"loss": 1.853,
"step": 3100
},
{
"epoch": 1.7536475869809203,
"grad_norm": 3.063894748687744,
"learning_rate": 8.320539224864258e-05,
"loss": 2.02,
"step": 3125
},
{
"epoch": 1.7676767676767677,
"grad_norm": 1.68687105178833,
"learning_rate": 8.226923797041754e-05,
"loss": 2.0283,
"step": 3150
},
{
"epoch": 1.7817059483726152,
"grad_norm": 2.443969249725342,
"learning_rate": 8.133308369219247e-05,
"loss": 2.0006,
"step": 3175
},
{
"epoch": 1.7957351290684624,
"grad_norm": 1.850441336631775,
"learning_rate": 8.039692941396743e-05,
"loss": 2.0027,
"step": 3200
},
{
"epoch": 1.8097643097643097,
"grad_norm": 2.0784268379211426,
"learning_rate": 7.946077513574238e-05,
"loss": 1.8414,
"step": 3225
},
{
"epoch": 1.8237934904601572,
"grad_norm": 2.295764207839966,
"learning_rate": 7.852462085751733e-05,
"loss": 2.044,
"step": 3250
},
{
"epoch": 1.8378226711560046,
"grad_norm": 2.173306941986084,
"learning_rate": 7.758846657929227e-05,
"loss": 1.9782,
"step": 3275
},
{
"epoch": 1.8518518518518519,
"grad_norm": 0.9976969957351685,
"learning_rate": 7.665231230106722e-05,
"loss": 1.9107,
"step": 3300
},
{
"epoch": 1.865881032547699,
"grad_norm": 1.8214789628982544,
"learning_rate": 7.571615802284217e-05,
"loss": 2.0214,
"step": 3325
},
{
"epoch": 1.8799102132435466,
"grad_norm": 2.858731985092163,
"learning_rate": 7.478000374461712e-05,
"loss": 1.8701,
"step": 3350
},
{
"epoch": 1.893939393939394,
"grad_norm": 4.03577995300293,
"learning_rate": 7.384384946639206e-05,
"loss": 1.9072,
"step": 3375
},
{
"epoch": 1.9079685746352413,
"grad_norm": 1.9624089002609253,
"learning_rate": 7.290769518816701e-05,
"loss": 2.0305,
"step": 3400
},
{
"epoch": 1.9219977553310885,
"grad_norm": 3.1496636867523193,
"learning_rate": 7.197154090994197e-05,
"loss": 1.9287,
"step": 3425
},
{
"epoch": 1.936026936026936,
"grad_norm": 2.4287681579589844,
"learning_rate": 7.103538663171692e-05,
"loss": 1.8393,
"step": 3450
},
{
"epoch": 1.9500561167227835,
"grad_norm": 2.4512710571289062,
"learning_rate": 7.009923235349186e-05,
"loss": 1.9218,
"step": 3475
},
{
"epoch": 1.964085297418631,
"grad_norm": 3.565269947052002,
"learning_rate": 6.916307807526681e-05,
"loss": 1.9244,
"step": 3500
},
{
"epoch": 1.9781144781144782,
"grad_norm": 3.4579124450683594,
"learning_rate": 6.822692379704176e-05,
"loss": 1.9715,
"step": 3525
},
{
"epoch": 1.9921436588103254,
"grad_norm": 2.4138684272766113,
"learning_rate": 6.729076951881671e-05,
"loss": 1.9685,
"step": 3550
},
{
"epoch": 2.006172839506173,
"grad_norm": 2.728733777999878,
"learning_rate": 6.635461524059165e-05,
"loss": 1.895,
"step": 3575
},
{
"epoch": 2.0202020202020203,
"grad_norm": 3.295377016067505,
"learning_rate": 6.54184609623666e-05,
"loss": 1.5609,
"step": 3600
},
{
"epoch": 2.0342312008978674,
"grad_norm": 1.7398282289505005,
"learning_rate": 6.448230668414155e-05,
"loss": 1.4492,
"step": 3625
},
{
"epoch": 2.048260381593715,
"grad_norm": 2.4340431690216064,
"learning_rate": 6.354615240591649e-05,
"loss": 1.5446,
"step": 3650
},
{
"epoch": 2.0622895622895623,
"grad_norm": 2.2065236568450928,
"learning_rate": 6.260999812769144e-05,
"loss": 1.5074,
"step": 3675
},
{
"epoch": 2.0763187429854097,
"grad_norm": 2.7881624698638916,
"learning_rate": 6.16738438494664e-05,
"loss": 1.4395,
"step": 3700
},
{
"epoch": 2.090347923681257,
"grad_norm": 2.9817967414855957,
"learning_rate": 6.073768957124134e-05,
"loss": 1.4299,
"step": 3725
},
{
"epoch": 2.1043771043771042,
"grad_norm": 2.3257029056549072,
"learning_rate": 5.9801535293016285e-05,
"loss": 1.4431,
"step": 3750
},
{
"epoch": 2.1184062850729517,
"grad_norm": 2.277677297592163,
"learning_rate": 5.8865381014791244e-05,
"loss": 1.5018,
"step": 3775
},
{
"epoch": 2.132435465768799,
"grad_norm": 3.131730318069458,
"learning_rate": 5.792922673656619e-05,
"loss": 1.3983,
"step": 3800
},
{
"epoch": 2.1464646464646466,
"grad_norm": 3.49137020111084,
"learning_rate": 5.6993072458341134e-05,
"loss": 1.543,
"step": 3825
},
{
"epoch": 2.1604938271604937,
"grad_norm": 1.567698359489441,
"learning_rate": 5.605691818011608e-05,
"loss": 1.552,
"step": 3850
},
{
"epoch": 2.174523007856341,
"grad_norm": 3.6868786811828613,
"learning_rate": 5.512076390189104e-05,
"loss": 1.4362,
"step": 3875
},
{
"epoch": 2.1885521885521886,
"grad_norm": 3.3047056198120117,
"learning_rate": 5.418460962366598e-05,
"loss": 1.463,
"step": 3900
},
{
"epoch": 2.202581369248036,
"grad_norm": 2.219902515411377,
"learning_rate": 5.324845534544093e-05,
"loss": 1.4488,
"step": 3925
},
{
"epoch": 2.2166105499438835,
"grad_norm": 2.3671610355377197,
"learning_rate": 5.231230106721587e-05,
"loss": 1.4415,
"step": 3950
},
{
"epoch": 2.2306397306397305,
"grad_norm": 2.889744997024536,
"learning_rate": 5.137614678899083e-05,
"loss": 1.4767,
"step": 3975
},
{
"epoch": 2.244668911335578,
"grad_norm": 3.9740052223205566,
"learning_rate": 5.0439992510765776e-05,
"loss": 1.4877,
"step": 4000
},
{
"epoch": 2.2586980920314255,
"grad_norm": 3.672966718673706,
"learning_rate": 4.950383823254072e-05,
"loss": 1.5627,
"step": 4025
},
{
"epoch": 2.2727272727272725,
"grad_norm": 1.8203610181808472,
"learning_rate": 4.856768395431567e-05,
"loss": 1.5156,
"step": 4050
},
{
"epoch": 2.28675645342312,
"grad_norm": 3.0803072452545166,
"learning_rate": 4.763152967609062e-05,
"loss": 1.3682,
"step": 4075
},
{
"epoch": 2.3007856341189674,
"grad_norm": 2.7862794399261475,
"learning_rate": 4.669537539786557e-05,
"loss": 1.5184,
"step": 4100
},
{
"epoch": 2.314814814814815,
"grad_norm": 2.9466044902801514,
"learning_rate": 4.5759221119640515e-05,
"loss": 1.508,
"step": 4125
},
{
"epoch": 2.3288439955106623,
"grad_norm": 2.687072515487671,
"learning_rate": 4.482306684141547e-05,
"loss": 1.4555,
"step": 4150
},
{
"epoch": 2.3428731762065094,
"grad_norm": 2.7922186851501465,
"learning_rate": 4.388691256319041e-05,
"loss": 1.4884,
"step": 4175
},
{
"epoch": 2.356902356902357,
"grad_norm": 4.470178604125977,
"learning_rate": 4.2950758284965364e-05,
"loss": 1.5546,
"step": 4200
},
{
"epoch": 2.3709315375982043,
"grad_norm": 2.3221213817596436,
"learning_rate": 4.201460400674031e-05,
"loss": 1.4637,
"step": 4225
},
{
"epoch": 2.3849607182940518,
"grad_norm": 3.1816506385803223,
"learning_rate": 4.107844972851526e-05,
"loss": 1.5608,
"step": 4250
},
{
"epoch": 2.398989898989899,
"grad_norm": 2.7296078205108643,
"learning_rate": 4.0142295450290206e-05,
"loss": 1.3226,
"step": 4275
},
{
"epoch": 2.4130190796857462,
"grad_norm": 3.251237392425537,
"learning_rate": 3.920614117206516e-05,
"loss": 1.465,
"step": 4300
},
{
"epoch": 2.4270482603815937,
"grad_norm": 4.027979373931885,
"learning_rate": 3.82699868938401e-05,
"loss": 1.6209,
"step": 4325
},
{
"epoch": 2.441077441077441,
"grad_norm": 3.3260245323181152,
"learning_rate": 3.7333832615615054e-05,
"loss": 1.5355,
"step": 4350
},
{
"epoch": 2.4551066217732886,
"grad_norm": 2.058065176010132,
"learning_rate": 3.639767833739e-05,
"loss": 1.3601,
"step": 4375
},
{
"epoch": 2.4691358024691357,
"grad_norm": 3.329512357711792,
"learning_rate": 3.546152405916495e-05,
"loss": 1.5068,
"step": 4400
},
{
"epoch": 2.483164983164983,
"grad_norm": 3.266387939453125,
"learning_rate": 3.4525369780939896e-05,
"loss": 1.4405,
"step": 4425
},
{
"epoch": 2.4971941638608306,
"grad_norm": 5.186097621917725,
"learning_rate": 3.358921550271485e-05,
"loss": 1.5836,
"step": 4450
},
{
"epoch": 2.5112233445566776,
"grad_norm": 3.0164802074432373,
"learning_rate": 3.265306122448979e-05,
"loss": 1.5984,
"step": 4475
},
{
"epoch": 2.525252525252525,
"grad_norm": 3.318781614303589,
"learning_rate": 3.1716906946264745e-05,
"loss": 1.3847,
"step": 4500
},
{
"epoch": 2.5392817059483725,
"grad_norm": 4.612998008728027,
"learning_rate": 3.078075266803969e-05,
"loss": 1.4501,
"step": 4525
},
{
"epoch": 2.55331088664422,
"grad_norm": 3.4765076637268066,
"learning_rate": 2.9844598389814642e-05,
"loss": 1.4231,
"step": 4550
},
{
"epoch": 2.5673400673400675,
"grad_norm": 3.3265297412872314,
"learning_rate": 2.8908444111589587e-05,
"loss": 1.4242,
"step": 4575
},
{
"epoch": 2.581369248035915,
"grad_norm": 2.7402238845825195,
"learning_rate": 2.797228983336454e-05,
"loss": 1.4465,
"step": 4600
},
{
"epoch": 2.595398428731762,
"grad_norm": 5.435632705688477,
"learning_rate": 2.7036135555139487e-05,
"loss": 1.3818,
"step": 4625
},
{
"epoch": 2.6094276094276094,
"grad_norm": 2.3987693786621094,
"learning_rate": 2.6099981276914436e-05,
"loss": 1.4718,
"step": 4650
},
{
"epoch": 2.623456790123457,
"grad_norm": 3.6313223838806152,
"learning_rate": 2.5163826998689384e-05,
"loss": 1.4459,
"step": 4675
},
{
"epoch": 2.637485970819304,
"grad_norm": 3.2163851261138916,
"learning_rate": 2.4227672720464333e-05,
"loss": 1.4328,
"step": 4700
},
{
"epoch": 2.6515151515151514,
"grad_norm": 3.153942823410034,
"learning_rate": 2.329151844223928e-05,
"loss": 1.5129,
"step": 4725
},
{
"epoch": 2.665544332210999,
"grad_norm": 3.770242214202881,
"learning_rate": 2.235536416401423e-05,
"loss": 1.4605,
"step": 4750
},
{
"epoch": 2.6795735129068463,
"grad_norm": 3.060556650161743,
"learning_rate": 2.1419209885789178e-05,
"loss": 1.3396,
"step": 4775
},
{
"epoch": 2.6936026936026938,
"grad_norm": 3.023381233215332,
"learning_rate": 2.0483055607564127e-05,
"loss": 1.5085,
"step": 4800
},
{
"epoch": 2.7076318742985412,
"grad_norm": 4.56315279006958,
"learning_rate": 1.9546901329339075e-05,
"loss": 1.5112,
"step": 4825
},
{
"epoch": 2.7216610549943883,
"grad_norm": 2.4291975498199463,
"learning_rate": 1.8610747051114023e-05,
"loss": 1.5944,
"step": 4850
},
{
"epoch": 2.7356902356902357,
"grad_norm": 5.105324745178223,
"learning_rate": 1.7674592772888972e-05,
"loss": 1.4247,
"step": 4875
},
{
"epoch": 2.749719416386083,
"grad_norm": 0.40811389684677124,
"learning_rate": 1.673843849466392e-05,
"loss": 1.5592,
"step": 4900
},
{
"epoch": 2.76374859708193,
"grad_norm": 3.125684976577759,
"learning_rate": 1.580228421643887e-05,
"loss": 1.4624,
"step": 4925
},
{
"epoch": 2.7777777777777777,
"grad_norm": 4.835251808166504,
"learning_rate": 1.4866129938213819e-05,
"loss": 1.53,
"step": 4950
},
{
"epoch": 2.791806958473625,
"grad_norm": 3.4443600177764893,
"learning_rate": 1.3929975659988767e-05,
"loss": 1.451,
"step": 4975
},
{
"epoch": 2.8058361391694726,
"grad_norm": 4.375021457672119,
"learning_rate": 1.2993821381763716e-05,
"loss": 1.5138,
"step": 5000
},
{
"epoch": 2.81986531986532,
"grad_norm": 3.567558526992798,
"learning_rate": 1.2057667103538664e-05,
"loss": 1.2718,
"step": 5025
},
{
"epoch": 2.833894500561167,
"grad_norm": 2.118828296661377,
"learning_rate": 1.1121512825313613e-05,
"loss": 1.2864,
"step": 5050
},
{
"epoch": 2.8479236812570146,
"grad_norm": 4.578852653503418,
"learning_rate": 1.0185358547088561e-05,
"loss": 1.395,
"step": 5075
},
{
"epoch": 2.861952861952862,
"grad_norm": 3.704861879348755,
"learning_rate": 9.24920426886351e-06,
"loss": 1.5031,
"step": 5100
},
{
"epoch": 2.875982042648709,
"grad_norm": 2.3843753337860107,
"learning_rate": 8.313049990638458e-06,
"loss": 1.3728,
"step": 5125
},
{
"epoch": 2.8900112233445565,
"grad_norm": 4.627071857452393,
"learning_rate": 7.3768957124134065e-06,
"loss": 1.4112,
"step": 5150
},
{
"epoch": 2.904040404040404,
"grad_norm": 4.36806058883667,
"learning_rate": 6.440741434188355e-06,
"loss": 1.3077,
"step": 5175
},
{
"epoch": 2.9180695847362514,
"grad_norm": 1.2492505311965942,
"learning_rate": 5.504587155963303e-06,
"loss": 1.279,
"step": 5200
},
{
"epoch": 2.932098765432099,
"grad_norm": 4.823546409606934,
"learning_rate": 4.568432877738252e-06,
"loss": 1.3955,
"step": 5225
},
{
"epoch": 2.9461279461279464,
"grad_norm": 2.1844117641448975,
"learning_rate": 3.6322785995132002e-06,
"loss": 1.4355,
"step": 5250
},
{
"epoch": 2.9601571268237934,
"grad_norm": 4.229171276092529,
"learning_rate": 2.6961243212881483e-06,
"loss": 1.3527,
"step": 5275
},
{
"epoch": 2.974186307519641,
"grad_norm": 4.21515417098999,
"learning_rate": 1.759970043063097e-06,
"loss": 1.3262,
"step": 5300
},
{
"epoch": 2.9882154882154883,
"grad_norm": 3.508025884628296,
"learning_rate": 8.238157648380454e-07,
"loss": 1.3869,
"step": 5325
}
],
"logging_steps": 25,
"max_steps": 5346,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0786298856312115e+17,
"train_batch_size": 6,
"trial_name": null,
"trial_params": null
}