{
"best_global_step": 2500,
"best_metric": 8.384208161162686,
"best_model_checkpoint": "./SALAMA_NEWMED2/checkpoint-2500",
"epoch": 0.9987016878058524,
"eval_steps": 2500,
"global_step": 2500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00399480675122341,
"grad_norm": 4.626885890960693,
"learning_rate": 1.8e-07,
"loss": 0.157,
"step": 10
},
{
"epoch": 0.00798961350244682,
"grad_norm": 4.6383256912231445,
"learning_rate": 3.8e-07,
"loss": 0.1612,
"step": 20
},
{
"epoch": 0.011984420253670228,
"grad_norm": 4.137997627258301,
"learning_rate": 5.800000000000001e-07,
"loss": 0.1601,
"step": 30
},
{
"epoch": 0.01597922700489364,
"grad_norm": 4.052377223968506,
"learning_rate": 7.8e-07,
"loss": 0.166,
"step": 40
},
{
"epoch": 0.019974033756117046,
"grad_norm": 4.292566299438477,
"learning_rate": 9.800000000000001e-07,
"loss": 0.1519,
"step": 50
},
{
"epoch": 0.023968840507340456,
"grad_norm": 5.116729736328125,
"learning_rate": 1.1800000000000001e-06,
"loss": 0.1697,
"step": 60
},
{
"epoch": 0.027963647258563866,
"grad_norm": 4.941128253936768,
"learning_rate": 1.3800000000000001e-06,
"loss": 0.1338,
"step": 70
},
{
"epoch": 0.03195845400978728,
"grad_norm": 4.003450870513916,
"learning_rate": 1.5800000000000001e-06,
"loss": 0.1506,
"step": 80
},
{
"epoch": 0.035953260761010686,
"grad_norm": 4.099050521850586,
"learning_rate": 1.7800000000000001e-06,
"loss": 0.128,
"step": 90
},
{
"epoch": 0.03994806751223409,
"grad_norm": 3.813715696334839,
"learning_rate": 1.98e-06,
"loss": 0.1519,
"step": 100
},
{
"epoch": 0.043942874263457506,
"grad_norm": 3.1228017807006836,
"learning_rate": 2.1800000000000003e-06,
"loss": 0.1602,
"step": 110
},
{
"epoch": 0.04793768101468091,
"grad_norm": 3.2234630584716797,
"learning_rate": 2.38e-06,
"loss": 0.1573,
"step": 120
},
{
"epoch": 0.051932487765904325,
"grad_norm": 3.196345806121826,
"learning_rate": 2.5800000000000003e-06,
"loss": 0.1594,
"step": 130
},
{
"epoch": 0.05592729451712773,
"grad_norm": 3.428943634033203,
"learning_rate": 2.7800000000000005e-06,
"loss": 0.1528,
"step": 140
},
{
"epoch": 0.059922101268351145,
"grad_norm": 4.3613505363464355,
"learning_rate": 2.9800000000000003e-06,
"loss": 0.1764,
"step": 150
},
{
"epoch": 0.06391690801957456,
"grad_norm": 3.6310315132141113,
"learning_rate": 3.1800000000000005e-06,
"loss": 0.1472,
"step": 160
},
{
"epoch": 0.06791171477079797,
"grad_norm": 4.561097621917725,
"learning_rate": 3.3800000000000007e-06,
"loss": 0.158,
"step": 170
},
{
"epoch": 0.07190652152202137,
"grad_norm": 2.8412837982177734,
"learning_rate": 3.58e-06,
"loss": 0.1561,
"step": 180
},
{
"epoch": 0.07590132827324478,
"grad_norm": 3.9358673095703125,
"learning_rate": 3.7800000000000002e-06,
"loss": 0.1682,
"step": 190
},
{
"epoch": 0.07989613502446818,
"grad_norm": 3.9661779403686523,
"learning_rate": 3.980000000000001e-06,
"loss": 0.1375,
"step": 200
},
{
"epoch": 0.0838909417756916,
"grad_norm": 4.195302963256836,
"learning_rate": 4.18e-06,
"loss": 0.1557,
"step": 210
},
{
"epoch": 0.08788574852691501,
"grad_norm": 4.319852352142334,
"learning_rate": 4.38e-06,
"loss": 0.1828,
"step": 220
},
{
"epoch": 0.09188055527813842,
"grad_norm": 3.6552112102508545,
"learning_rate": 4.58e-06,
"loss": 0.1814,
"step": 230
},
{
"epoch": 0.09587536202936182,
"grad_norm": 4.2657365798950195,
"learning_rate": 4.78e-06,
"loss": 0.1829,
"step": 240
},
{
"epoch": 0.09987016878058524,
"grad_norm": 4.424646377563477,
"learning_rate": 4.980000000000001e-06,
"loss": 0.1734,
"step": 250
},
{
"epoch": 0.10386497553180865,
"grad_norm": 3.4387409687042236,
"learning_rate": 5.18e-06,
"loss": 0.1548,
"step": 260
},
{
"epoch": 0.10785978228303206,
"grad_norm": 3.510521173477173,
"learning_rate": 5.380000000000001e-06,
"loss": 0.1401,
"step": 270
},
{
"epoch": 0.11185458903425546,
"grad_norm": 3.2406973838806152,
"learning_rate": 5.580000000000001e-06,
"loss": 0.1573,
"step": 280
},
{
"epoch": 0.11584939578547888,
"grad_norm": 3.9534926414489746,
"learning_rate": 5.78e-06,
"loss": 0.1768,
"step": 290
},
{
"epoch": 0.11984420253670229,
"grad_norm": 4.111250400543213,
"learning_rate": 5.98e-06,
"loss": 0.1754,
"step": 300
},
{
"epoch": 0.1238390092879257,
"grad_norm": 3.7990970611572266,
"learning_rate": 6.18e-06,
"loss": 0.1594,
"step": 310
},
{
"epoch": 0.12783381603914912,
"grad_norm": 3.062520980834961,
"learning_rate": 6.380000000000001e-06,
"loss": 0.1506,
"step": 320
},
{
"epoch": 0.13182862279037252,
"grad_norm": 4.121057987213135,
"learning_rate": 6.5800000000000005e-06,
"loss": 0.1713,
"step": 330
},
{
"epoch": 0.13582342954159593,
"grad_norm": 3.127715826034546,
"learning_rate": 6.780000000000001e-06,
"loss": 0.1598,
"step": 340
},
{
"epoch": 0.13981823629281934,
"grad_norm": 3.2151925563812256,
"learning_rate": 6.98e-06,
"loss": 0.1491,
"step": 350
},
{
"epoch": 0.14381304304404274,
"grad_norm": 2.5872230529785156,
"learning_rate": 7.180000000000001e-06,
"loss": 0.1691,
"step": 360
},
{
"epoch": 0.14780784979526615,
"grad_norm": 4.3994059562683105,
"learning_rate": 7.3800000000000005e-06,
"loss": 0.1867,
"step": 370
},
{
"epoch": 0.15180265654648956,
"grad_norm": 4.190740585327148,
"learning_rate": 7.58e-06,
"loss": 0.1752,
"step": 380
},
{
"epoch": 0.15579746329771296,
"grad_norm": 4.055376052856445,
"learning_rate": 7.78e-06,
"loss": 0.1728,
"step": 390
},
{
"epoch": 0.15979227004893637,
"grad_norm": 4.507293224334717,
"learning_rate": 7.980000000000002e-06,
"loss": 0.1755,
"step": 400
},
{
"epoch": 0.1637870768001598,
"grad_norm": 3.873270034790039,
"learning_rate": 8.18e-06,
"loss": 0.1678,
"step": 410
},
{
"epoch": 0.1677818835513832,
"grad_norm": 4.0611491203308105,
"learning_rate": 8.380000000000001e-06,
"loss": 0.1682,
"step": 420
},
{
"epoch": 0.17177669030260662,
"grad_norm": 3.7498176097869873,
"learning_rate": 8.580000000000001e-06,
"loss": 0.1621,
"step": 430
},
{
"epoch": 0.17577149705383002,
"grad_norm": 2.854010581970215,
"learning_rate": 8.78e-06,
"loss": 0.1514,
"step": 440
},
{
"epoch": 0.17976630380505343,
"grad_norm": 3.098860025405884,
"learning_rate": 8.98e-06,
"loss": 0.1386,
"step": 450
},
{
"epoch": 0.18376111055627684,
"grad_norm": 5.511478424072266,
"learning_rate": 9.180000000000002e-06,
"loss": 0.176,
"step": 460
},
{
"epoch": 0.18775591730750024,
"grad_norm": 3.8882782459259033,
"learning_rate": 9.38e-06,
"loss": 0.1742,
"step": 470
},
{
"epoch": 0.19175072405872365,
"grad_norm": 5.161660194396973,
"learning_rate": 9.58e-06,
"loss": 0.1495,
"step": 480
},
{
"epoch": 0.19574553080994708,
"grad_norm": 3.9864652156829834,
"learning_rate": 9.780000000000001e-06,
"loss": 0.1869,
"step": 490
},
{
"epoch": 0.1997403375611705,
"grad_norm": 3.3368163108825684,
"learning_rate": 9.980000000000001e-06,
"loss": 0.1589,
"step": 500
},
{
"epoch": 0.2037351443123939,
"grad_norm": 4.298588752746582,
"learning_rate": 9.98716486023959e-06,
"loss": 0.1675,
"step": 510
},
{
"epoch": 0.2077299510636173,
"grad_norm": 3.3363137245178223,
"learning_rate": 9.972903593839133e-06,
"loss": 0.186,
"step": 520
},
{
"epoch": 0.2117247578148407,
"grad_norm": 3.345853090286255,
"learning_rate": 9.958642327438678e-06,
"loss": 0.1736,
"step": 530
},
{
"epoch": 0.21571956456606411,
"grad_norm": 3.282945156097412,
"learning_rate": 9.944381061038221e-06,
"loss": 0.1711,
"step": 540
},
{
"epoch": 0.21971437131728752,
"grad_norm": 5.176159858703613,
"learning_rate": 9.930119794637765e-06,
"loss": 0.2024,
"step": 550
},
{
"epoch": 0.22370917806851093,
"grad_norm": 3.2021000385284424,
"learning_rate": 9.91585852823731e-06,
"loss": 0.1515,
"step": 560
},
{
"epoch": 0.22770398481973433,
"grad_norm": 3.4485816955566406,
"learning_rate": 9.901597261836851e-06,
"loss": 0.1673,
"step": 570
},
{
"epoch": 0.23169879157095777,
"grad_norm": 3.681199789047241,
"learning_rate": 9.887335995436396e-06,
"loss": 0.1713,
"step": 580
},
{
"epoch": 0.23569359832218117,
"grad_norm": 4.410940647125244,
"learning_rate": 9.87307472903594e-06,
"loss": 0.1653,
"step": 590
},
{
"epoch": 0.23968840507340458,
"grad_norm": 3.284226655960083,
"learning_rate": 9.858813462635483e-06,
"loss": 0.1442,
"step": 600
},
{
"epoch": 0.243683211824628,
"grad_norm": 3.73500394821167,
"learning_rate": 9.844552196235026e-06,
"loss": 0.1851,
"step": 610
},
{
"epoch": 0.2476780185758514,
"grad_norm": 4.125741958618164,
"learning_rate": 9.83029092983457e-06,
"loss": 0.1965,
"step": 620
},
{
"epoch": 0.2516728253270748,
"grad_norm": 4.371180057525635,
"learning_rate": 9.816029663434114e-06,
"loss": 0.1623,
"step": 630
},
{
"epoch": 0.25566763207829823,
"grad_norm": 5.782146453857422,
"learning_rate": 9.801768397033657e-06,
"loss": 0.1821,
"step": 640
},
{
"epoch": 0.2596624388295216,
"grad_norm": 3.964480400085449,
"learning_rate": 9.787507130633202e-06,
"loss": 0.1726,
"step": 650
},
{
"epoch": 0.26365724558074505,
"grad_norm": 3.8023924827575684,
"learning_rate": 9.773245864232744e-06,
"loss": 0.1771,
"step": 660
},
{
"epoch": 0.2676520523319684,
"grad_norm": 3.616413116455078,
"learning_rate": 9.758984597832289e-06,
"loss": 0.1913,
"step": 670
},
{
"epoch": 0.27164685908319186,
"grad_norm": 3.1415390968322754,
"learning_rate": 9.744723331431832e-06,
"loss": 0.1783,
"step": 680
},
{
"epoch": 0.27564166583441524,
"grad_norm": 3.9891934394836426,
"learning_rate": 9.730462065031375e-06,
"loss": 0.1686,
"step": 690
},
{
"epoch": 0.2796364725856387,
"grad_norm": 3.3664565086364746,
"learning_rate": 9.71620079863092e-06,
"loss": 0.1928,
"step": 700
},
{
"epoch": 0.28363127933686205,
"grad_norm": 3.149634599685669,
"learning_rate": 9.701939532230463e-06,
"loss": 0.1839,
"step": 710
},
{
"epoch": 0.2876260860880855,
"grad_norm": 3.469912052154541,
"learning_rate": 9.687678265830007e-06,
"loss": 0.1699,
"step": 720
},
{
"epoch": 0.2916208928393089,
"grad_norm": 4.075517654418945,
"learning_rate": 9.67341699942955e-06,
"loss": 0.1807,
"step": 730
},
{
"epoch": 0.2956156995905323,
"grad_norm": 3.139695405960083,
"learning_rate": 9.659155733029095e-06,
"loss": 0.1618,
"step": 740
},
{
"epoch": 0.29961050634175573,
"grad_norm": 4.296646595001221,
"learning_rate": 9.644894466628636e-06,
"loss": 0.1912,
"step": 750
},
{
"epoch": 0.3036053130929791,
"grad_norm": 4.1294379234313965,
"learning_rate": 9.630633200228181e-06,
"loss": 0.1559,
"step": 760
},
{
"epoch": 0.30760011984420255,
"grad_norm": 3.1996731758117676,
"learning_rate": 9.616371933827725e-06,
"loss": 0.1868,
"step": 770
},
{
"epoch": 0.3115949265954259,
"grad_norm": 3.0223231315612793,
"learning_rate": 9.602110667427268e-06,
"loss": 0.176,
"step": 780
},
{
"epoch": 0.31558973334664936,
"grad_norm": 3.7960736751556396,
"learning_rate": 9.587849401026813e-06,
"loss": 0.1617,
"step": 790
},
{
"epoch": 0.31958454009787274,
"grad_norm": 3.7317583560943604,
"learning_rate": 9.573588134626356e-06,
"loss": 0.1461,
"step": 800
},
{
"epoch": 0.32357934684909617,
"grad_norm": 2.9020590782165527,
"learning_rate": 9.5593268682259e-06,
"loss": 0.1498,
"step": 810
},
{
"epoch": 0.3275741536003196,
"grad_norm": 3.0017457008361816,
"learning_rate": 9.545065601825442e-06,
"loss": 0.143,
"step": 820
},
{
"epoch": 0.331568960351543,
"grad_norm": 2.892198324203491,
"learning_rate": 9.530804335424987e-06,
"loss": 0.1781,
"step": 830
},
{
"epoch": 0.3355637671027664,
"grad_norm": 3.424417734146118,
"learning_rate": 9.516543069024529e-06,
"loss": 0.1565,
"step": 840
},
{
"epoch": 0.3395585738539898,
"grad_norm": 3.7211880683898926,
"learning_rate": 9.502281802624074e-06,
"loss": 0.1654,
"step": 850
},
{
"epoch": 0.34355338060521323,
"grad_norm": 3.9410080909729004,
"learning_rate": 9.488020536223617e-06,
"loss": 0.17,
"step": 860
},
{
"epoch": 0.3475481873564366,
"grad_norm": 3.70001482963562,
"learning_rate": 9.47375926982316e-06,
"loss": 0.166,
"step": 870
},
{
"epoch": 0.35154299410766005,
"grad_norm": 4.225161075592041,
"learning_rate": 9.459498003422705e-06,
"loss": 0.1844,
"step": 880
},
{
"epoch": 0.3555378008588834,
"grad_norm": 3.4117069244384766,
"learning_rate": 9.445236737022249e-06,
"loss": 0.1746,
"step": 890
},
{
"epoch": 0.35953260761010686,
"grad_norm": 3.593611717224121,
"learning_rate": 9.430975470621792e-06,
"loss": 0.1696,
"step": 900
},
{
"epoch": 0.3635274143613303,
"grad_norm": 4.341125965118408,
"learning_rate": 9.416714204221335e-06,
"loss": 0.1627,
"step": 910
},
{
"epoch": 0.36752222111255367,
"grad_norm": 4.328516960144043,
"learning_rate": 9.40245293782088e-06,
"loss": 0.1672,
"step": 920
},
{
"epoch": 0.3715170278637771,
"grad_norm": 3.9491498470306396,
"learning_rate": 9.388191671420423e-06,
"loss": 0.1741,
"step": 930
},
{
"epoch": 0.3755118346150005,
"grad_norm": 3.0523922443389893,
"learning_rate": 9.373930405019966e-06,
"loss": 0.1609,
"step": 940
},
{
"epoch": 0.3795066413662239,
"grad_norm": 3.96618914604187,
"learning_rate": 9.35966913861951e-06,
"loss": 0.1508,
"step": 950
},
{
"epoch": 0.3835014481174473,
"grad_norm": 3.1924965381622314,
"learning_rate": 9.345407872219053e-06,
"loss": 0.155,
"step": 960
},
{
"epoch": 0.38749625486867073,
"grad_norm": 4.26977014541626,
"learning_rate": 9.331146605818598e-06,
"loss": 0.1971,
"step": 970
},
{
"epoch": 0.39149106161989417,
"grad_norm": 3.3444809913635254,
"learning_rate": 9.316885339418141e-06,
"loss": 0.1643,
"step": 980
},
{
"epoch": 0.39548586837111754,
"grad_norm": 3.90372371673584,
"learning_rate": 9.302624073017684e-06,
"loss": 0.1655,
"step": 990
},
{
"epoch": 0.399480675122341,
"grad_norm": 3.3378031253814697,
"learning_rate": 9.288362806617228e-06,
"loss": 0.1752,
"step": 1000
},
{
"epoch": 0.40347548187356436,
"grad_norm": 4.0089826583862305,
"learning_rate": 9.274101540216773e-06,
"loss": 0.1616,
"step": 1010
},
{
"epoch": 0.4074702886247878,
"grad_norm": 3.947985887527466,
"learning_rate": 9.259840273816316e-06,
"loss": 0.1576,
"step": 1020
},
{
"epoch": 0.41146509537601117,
"grad_norm": 3.615910768508911,
"learning_rate": 9.245579007415859e-06,
"loss": 0.1516,
"step": 1030
},
{
"epoch": 0.4154599021272346,
"grad_norm": 4.045357704162598,
"learning_rate": 9.231317741015402e-06,
"loss": 0.1795,
"step": 1040
},
{
"epoch": 0.419454708878458,
"grad_norm": 3.5269615650177,
"learning_rate": 9.217056474614946e-06,
"loss": 0.152,
"step": 1050
},
{
"epoch": 0.4234495156296814,
"grad_norm": 3.298372983932495,
"learning_rate": 9.20279520821449e-06,
"loss": 0.1544,
"step": 1060
},
{
"epoch": 0.42744432238090485,
"grad_norm": 3.652780055999756,
"learning_rate": 9.188533941814034e-06,
"loss": 0.1497,
"step": 1070
},
{
"epoch": 0.43143912913212823,
"grad_norm": 3.1677920818328857,
"learning_rate": 9.174272675413579e-06,
"loss": 0.1817,
"step": 1080
},
{
"epoch": 0.43543393588335166,
"grad_norm": 3.229947328567505,
"learning_rate": 9.16001140901312e-06,
"loss": 0.1438,
"step": 1090
},
{
"epoch": 0.43942874263457504,
"grad_norm": 4.125128269195557,
"learning_rate": 9.145750142612665e-06,
"loss": 0.1547,
"step": 1100
},
{
"epoch": 0.4434235493857985,
"grad_norm": 3.456726312637329,
"learning_rate": 9.131488876212208e-06,
"loss": 0.1731,
"step": 1110
},
{
"epoch": 0.44741835613702186,
"grad_norm": 3.4217751026153564,
"learning_rate": 9.117227609811752e-06,
"loss": 0.1505,
"step": 1120
},
{
"epoch": 0.4514131628882453,
"grad_norm": 2.805898666381836,
"learning_rate": 9.102966343411297e-06,
"loss": 0.1712,
"step": 1130
},
{
"epoch": 0.45540796963946867,
"grad_norm": 3.565091133117676,
"learning_rate": 9.08870507701084e-06,
"loss": 0.1914,
"step": 1140
},
{
"epoch": 0.4594027763906921,
"grad_norm": 3.3190317153930664,
"learning_rate": 9.074443810610383e-06,
"loss": 0.1567,
"step": 1150
},
{
"epoch": 0.46339758314191554,
"grad_norm": 3.5254857540130615,
"learning_rate": 9.060182544209926e-06,
"loss": 0.1694,
"step": 1160
},
{
"epoch": 0.4673923898931389,
"grad_norm": 4.437207221984863,
"learning_rate": 9.045921277809471e-06,
"loss": 0.1671,
"step": 1170
},
{
"epoch": 0.47138719664436235,
"grad_norm": 4.538163185119629,
"learning_rate": 9.031660011409013e-06,
"loss": 0.1825,
"step": 1180
},
{
"epoch": 0.47538200339558573,
"grad_norm": 3.300701141357422,
"learning_rate": 9.017398745008558e-06,
"loss": 0.1576,
"step": 1190
},
{
"epoch": 0.47937681014680916,
"grad_norm": 3.468872308731079,
"learning_rate": 9.003137478608101e-06,
"loss": 0.1403,
"step": 1200
},
{
"epoch": 0.48337161689803254,
"grad_norm": 3.9454572200775146,
"learning_rate": 8.988876212207644e-06,
"loss": 0.1604,
"step": 1210
},
{
"epoch": 0.487366423649256,
"grad_norm": 4.445584297180176,
"learning_rate": 8.97461494580719e-06,
"loss": 0.1432,
"step": 1220
},
{
"epoch": 0.49136123040047935,
"grad_norm": 4.145312309265137,
"learning_rate": 8.960353679406733e-06,
"loss": 0.1668,
"step": 1230
},
{
"epoch": 0.4953560371517028,
"grad_norm": 4.525082588195801,
"learning_rate": 8.946092413006276e-06,
"loss": 0.1788,
"step": 1240
},
{
"epoch": 0.4993508439029262,
"grad_norm": 3.0187716484069824,
"learning_rate": 8.931831146605819e-06,
"loss": 0.1603,
"step": 1250
},
{
"epoch": 0.5033456506541496,
"grad_norm": 3.865370750427246,
"learning_rate": 8.917569880205364e-06,
"loss": 0.1598,
"step": 1260
},
{
"epoch": 0.507340457405373,
"grad_norm": 3.384380340576172,
"learning_rate": 8.903308613804906e-06,
"loss": 0.1705,
"step": 1270
},
{
"epoch": 0.5113352641565965,
"grad_norm": 2.700723648071289,
"learning_rate": 8.88904734740445e-06,
"loss": 0.1496,
"step": 1280
},
{
"epoch": 0.5153300709078198,
"grad_norm": 3.309508800506592,
"learning_rate": 8.874786081003994e-06,
"loss": 0.186,
"step": 1290
},
{
"epoch": 0.5193248776590432,
"grad_norm": 4.151066303253174,
"learning_rate": 8.860524814603537e-06,
"loss": 0.1506,
"step": 1300
},
{
"epoch": 0.5233196844102667,
"grad_norm": 2.7802693843841553,
"learning_rate": 8.846263548203082e-06,
"loss": 0.1537,
"step": 1310
},
{
"epoch": 0.5273144911614901,
"grad_norm": 3.3392629623413086,
"learning_rate": 8.832002281802625e-06,
"loss": 0.1521,
"step": 1320
},
{
"epoch": 0.5313092979127134,
"grad_norm": 3.8402390480041504,
"learning_rate": 8.817741015402168e-06,
"loss": 0.1374,
"step": 1330
},
{
"epoch": 0.5353041046639369,
"grad_norm": 4.220055103302002,
"learning_rate": 8.803479749001712e-06,
"loss": 0.1801,
"step": 1340
},
{
"epoch": 0.5392989114151603,
"grad_norm": 4.223282814025879,
"learning_rate": 8.789218482601257e-06,
"loss": 0.152,
"step": 1350
},
{
"epoch": 0.5432937181663837,
"grad_norm": 3.0471835136413574,
"learning_rate": 8.7749572162008e-06,
"loss": 0.1593,
"step": 1360
},
{
"epoch": 0.5472885249176072,
"grad_norm": 4.133998870849609,
"learning_rate": 8.760695949800343e-06,
"loss": 0.1941,
"step": 1370
},
{
"epoch": 0.5512833316688305,
"grad_norm": 4.070934295654297,
"learning_rate": 8.746434683399886e-06,
"loss": 0.1786,
"step": 1380
},
{
"epoch": 0.5552781384200539,
"grad_norm": 3.6335694789886475,
"learning_rate": 8.73217341699943e-06,
"loss": 0.1446,
"step": 1390
},
{
"epoch": 0.5592729451712773,
"grad_norm": 2.9351556301116943,
"learning_rate": 8.717912150598975e-06,
"loss": 0.1605,
"step": 1400
},
{
"epoch": 0.5632677519225008,
"grad_norm": 4.384500503540039,
"learning_rate": 8.703650884198518e-06,
"loss": 0.1785,
"step": 1410
},
{
"epoch": 0.5672625586737241,
"grad_norm": 4.204753398895264,
"learning_rate": 8.689389617798061e-06,
"loss": 0.1555,
"step": 1420
},
{
"epoch": 0.5712573654249475,
"grad_norm": 2.983480930328369,
"learning_rate": 8.675128351397604e-06,
"loss": 0.1642,
"step": 1430
},
{
"epoch": 0.575252172176171,
"grad_norm": 3.3433070182800293,
"learning_rate": 8.66086708499715e-06,
"loss": 0.1542,
"step": 1440
},
{
"epoch": 0.5792469789273944,
"grad_norm": 3.154055118560791,
"learning_rate": 8.646605818596692e-06,
"loss": 0.1654,
"step": 1450
},
{
"epoch": 0.5832417856786178,
"grad_norm": 2.8929858207702637,
"learning_rate": 8.632344552196236e-06,
"loss": 0.1423,
"step": 1460
},
{
"epoch": 0.5872365924298412,
"grad_norm": 3.4675965309143066,
"learning_rate": 8.618083285795779e-06,
"loss": 0.1503,
"step": 1470
},
{
"epoch": 0.5912313991810646,
"grad_norm": 3.996819019317627,
"learning_rate": 8.603822019395322e-06,
"loss": 0.1678,
"step": 1480
},
{
"epoch": 0.595226205932288,
"grad_norm": 4.100089073181152,
"learning_rate": 8.589560752994867e-06,
"loss": 0.1576,
"step": 1490
},
{
"epoch": 0.5992210126835115,
"grad_norm": 3.0981807708740234,
"learning_rate": 8.57529948659441e-06,
"loss": 0.1528,
"step": 1500
},
{
"epoch": 0.6032158194347348,
"grad_norm": 3.0388245582580566,
"learning_rate": 8.561038220193954e-06,
"loss": 0.1529,
"step": 1510
},
{
"epoch": 0.6072106261859582,
"grad_norm": 3.5439846515655518,
"learning_rate": 8.546776953793497e-06,
"loss": 0.1543,
"step": 1520
},
{
"epoch": 0.6112054329371817,
"grad_norm": 4.001914978027344,
"learning_rate": 8.532515687393042e-06,
"loss": 0.1623,
"step": 1530
},
{
"epoch": 0.6152002396884051,
"grad_norm": 3.801597833633423,
"learning_rate": 8.518254420992585e-06,
"loss": 0.1343,
"step": 1540
},
{
"epoch": 0.6191950464396285,
"grad_norm": 3.7877979278564453,
"learning_rate": 8.503993154592128e-06,
"loss": 0.1556,
"step": 1550
},
{
"epoch": 0.6231898531908519,
"grad_norm": 3.0071709156036377,
"learning_rate": 8.489731888191672e-06,
"loss": 0.1354,
"step": 1560
},
{
"epoch": 0.6271846599420753,
"grad_norm": 2.960198163986206,
"learning_rate": 8.475470621791215e-06,
"loss": 0.1567,
"step": 1570
},
{
"epoch": 0.6311794666932987,
"grad_norm": 3.769070863723755,
"learning_rate": 8.46120935539076e-06,
"loss": 0.1392,
"step": 1580
},
{
"epoch": 0.6351742734445222,
"grad_norm": 4.615024089813232,
"learning_rate": 8.446948088990303e-06,
"loss": 0.17,
"step": 1590
},
{
"epoch": 0.6391690801957455,
"grad_norm": 2.945762872695923,
"learning_rate": 8.432686822589846e-06,
"loss": 0.1551,
"step": 1600
},
{
"epoch": 0.6431638869469689,
"grad_norm": 2.70076584815979,
"learning_rate": 8.41842555618939e-06,
"loss": 0.1404,
"step": 1610
},
{
"epoch": 0.6471586936981923,
"grad_norm": 3.891033172607422,
"learning_rate": 8.404164289788934e-06,
"loss": 0.142,
"step": 1620
},
{
"epoch": 0.6511535004494158,
"grad_norm": 3.177311658859253,
"learning_rate": 8.389903023388478e-06,
"loss": 0.1396,
"step": 1630
},
{
"epoch": 0.6551483072006392,
"grad_norm": 3.1945362091064453,
"learning_rate": 8.375641756988021e-06,
"loss": 0.1553,
"step": 1640
},
{
"epoch": 0.6591431139518625,
"grad_norm": 3.6035103797912598,
"learning_rate": 8.361380490587566e-06,
"loss": 0.1485,
"step": 1650
},
{
"epoch": 0.663137920703086,
"grad_norm": 2.8870794773101807,
"learning_rate": 8.347119224187107e-06,
"loss": 0.1467,
"step": 1660
},
{
"epoch": 0.6671327274543094,
"grad_norm": 2.89115834236145,
"learning_rate": 8.332857957786652e-06,
"loss": 0.1552,
"step": 1670
},
{
"epoch": 0.6711275342055328,
"grad_norm": 3.7482142448425293,
"learning_rate": 8.318596691386196e-06,
"loss": 0.1562,
"step": 1680
},
{
"epoch": 0.6751223409567562,
"grad_norm": 3.9963674545288086,
"learning_rate": 8.304335424985739e-06,
"loss": 0.1501,
"step": 1690
},
{
"epoch": 0.6791171477079796,
"grad_norm": 2.8433284759521484,
"learning_rate": 8.290074158585282e-06,
"loss": 0.147,
"step": 1700
},
{
"epoch": 0.683111954459203,
"grad_norm": 4.271126747131348,
"learning_rate": 8.275812892184827e-06,
"loss": 0.1608,
"step": 1710
},
{
"epoch": 0.6871067612104265,
"grad_norm": 3.0002920627593994,
"learning_rate": 8.26155162578437e-06,
"loss": 0.1688,
"step": 1720
},
{
"epoch": 0.6911015679616499,
"grad_norm": 2.862595796585083,
"learning_rate": 8.247290359383914e-06,
"loss": 0.1594,
"step": 1730
},
{
"epoch": 0.6950963747128732,
"grad_norm": 2.760641098022461,
"learning_rate": 8.233029092983458e-06,
"loss": 0.1498,
"step": 1740
},
{
"epoch": 0.6990911814640967,
"grad_norm": 3.4918088912963867,
"learning_rate": 8.218767826583002e-06,
"loss": 0.1644,
"step": 1750
},
{
"epoch": 0.7030859882153201,
"grad_norm": 3.4492998123168945,
"learning_rate": 8.204506560182545e-06,
"loss": 0.1415,
"step": 1760
},
{
"epoch": 0.7070807949665435,
"grad_norm": 3.371929407119751,
"learning_rate": 8.190245293782088e-06,
"loss": 0.1628,
"step": 1770
},
{
"epoch": 0.7110756017177668,
"grad_norm": 3.80068039894104,
"learning_rate": 8.175984027381633e-06,
"loss": 0.207,
"step": 1780
},
{
"epoch": 0.7150704084689903,
"grad_norm": 3.124359607696533,
"learning_rate": 8.161722760981175e-06,
"loss": 0.1328,
"step": 1790
},
{
"epoch": 0.7190652152202137,
"grad_norm": 3.6702919006347656,
"learning_rate": 8.14746149458072e-06,
"loss": 0.1752,
"step": 1800
},
{
"epoch": 0.7230600219714372,
"grad_norm": 2.942159414291382,
"learning_rate": 8.133200228180263e-06,
"loss": 0.1541,
"step": 1810
},
{
"epoch": 0.7270548287226606,
"grad_norm": 3.441380262374878,
"learning_rate": 8.118938961779806e-06,
"loss": 0.1513,
"step": 1820
},
{
"epoch": 0.7310496354738839,
"grad_norm": 3.824725866317749,
"learning_rate": 8.104677695379351e-06,
"loss": 0.1595,
"step": 1830
},
{
"epoch": 0.7350444422251073,
"grad_norm": 2.577880382537842,
"learning_rate": 8.090416428978894e-06,
"loss": 0.1526,
"step": 1840
},
{
"epoch": 0.7390392489763308,
"grad_norm": 3.2072184085845947,
"learning_rate": 8.076155162578438e-06,
"loss": 0.1601,
"step": 1850
},
{
"epoch": 0.7430340557275542,
"grad_norm": 3.3650033473968506,
"learning_rate": 8.06189389617798e-06,
"loss": 0.1527,
"step": 1860
},
{
"epoch": 0.7470288624787776,
"grad_norm": 3.575591564178467,
"learning_rate": 8.047632629777526e-06,
"loss": 0.1539,
"step": 1870
},
{
"epoch": 0.751023669230001,
"grad_norm": 3.2429842948913574,
"learning_rate": 8.033371363377069e-06,
"loss": 0.17,
"step": 1880
},
{
"epoch": 0.7550184759812244,
"grad_norm": 2.9099652767181396,
"learning_rate": 8.019110096976612e-06,
"loss": 0.1487,
"step": 1890
},
{
"epoch": 0.7590132827324478,
"grad_norm": 3.7374091148376465,
"learning_rate": 8.004848830576156e-06,
"loss": 0.1491,
"step": 1900
},
{
"epoch": 0.7630080894836713,
"grad_norm": 2.990825891494751,
"learning_rate": 7.990587564175699e-06,
"loss": 0.1495,
"step": 1910
},
{
"epoch": 0.7670028962348946,
"grad_norm": 3.381781816482544,
"learning_rate": 7.976326297775244e-06,
"loss": 0.1265,
"step": 1920
},
{
"epoch": 0.770997702986118,
"grad_norm": 3.0114705562591553,
"learning_rate": 7.962065031374787e-06,
"loss": 0.1421,
"step": 1930
},
{
"epoch": 0.7749925097373415,
"grad_norm": 3.9027457237243652,
"learning_rate": 7.94780376497433e-06,
"loss": 0.1453,
"step": 1940
},
{
"epoch": 0.7789873164885649,
"grad_norm": 5.936771869659424,
"learning_rate": 7.933542498573873e-06,
"loss": 0.1426,
"step": 1950
},
{
"epoch": 0.7829821232397883,
"grad_norm": 3.449986219406128,
"learning_rate": 7.919281232173418e-06,
"loss": 0.1722,
"step": 1960
},
{
"epoch": 0.7869769299910117,
"grad_norm": 3.3907039165496826,
"learning_rate": 7.905019965772962e-06,
"loss": 0.1464,
"step": 1970
},
{
"epoch": 0.7909717367422351,
"grad_norm": 3.0017893314361572,
"learning_rate": 7.890758699372505e-06,
"loss": 0.1562,
"step": 1980
},
{
"epoch": 0.7949665434934585,
"grad_norm": 3.4000141620635986,
"learning_rate": 7.876497432972048e-06,
"loss": 0.173,
"step": 1990
},
{
"epoch": 0.798961350244682,
"grad_norm": 3.022404670715332,
"learning_rate": 7.862236166571591e-06,
"loss": 0.15,
"step": 2000
},
{
"epoch": 0.8029561569959053,
"grad_norm": 3.237109899520874,
"learning_rate": 7.847974900171136e-06,
"loss": 0.1571,
"step": 2010
},
{
"epoch": 0.8069509637471287,
"grad_norm": 2.5590929985046387,
"learning_rate": 7.83371363377068e-06,
"loss": 0.1596,
"step": 2020
},
{
"epoch": 0.8109457704983521,
"grad_norm": 4.01102352142334,
"learning_rate": 7.819452367370223e-06,
"loss": 0.1511,
"step": 2030
},
{
"epoch": 0.8149405772495756,
"grad_norm": 3.1152961254119873,
"learning_rate": 7.805191100969766e-06,
"loss": 0.1564,
"step": 2040
},
{
"epoch": 0.818935384000799,
"grad_norm": 3.8849892616271973,
"learning_rate": 7.790929834569311e-06,
"loss": 0.1686,
"step": 2050
},
{
"epoch": 0.8229301907520223,
"grad_norm": 4.077883243560791,
"learning_rate": 7.776668568168854e-06,
"loss": 0.1519,
"step": 2060
},
{
"epoch": 0.8269249975032458,
"grad_norm": 3.1383252143859863,
"learning_rate": 7.762407301768397e-06,
"loss": 0.1572,
"step": 2070
},
{
"epoch": 0.8309198042544692,
"grad_norm": 3.380702257156372,
"learning_rate": 7.748146035367942e-06,
"loss": 0.147,
"step": 2080
},
{
"epoch": 0.8349146110056926,
"grad_norm": 3.4674811363220215,
"learning_rate": 7.733884768967484e-06,
"loss": 0.1802,
"step": 2090
},
{
"epoch": 0.838909417756916,
"grad_norm": 3.4076437950134277,
"learning_rate": 7.719623502567029e-06,
"loss": 0.1562,
"step": 2100
},
{
"epoch": 0.8429042245081394,
"grad_norm": 4.0926361083984375,
"learning_rate": 7.705362236166572e-06,
"loss": 0.1563,
"step": 2110
},
{
"epoch": 0.8468990312593628,
"grad_norm": 3.7539260387420654,
"learning_rate": 7.691100969766115e-06,
"loss": 0.1474,
"step": 2120
},
{
"epoch": 0.8508938380105863,
"grad_norm": 3.1815896034240723,
"learning_rate": 7.676839703365659e-06,
"loss": 0.1644,
"step": 2130
},
{
"epoch": 0.8548886447618097,
"grad_norm": 4.006225109100342,
"learning_rate": 7.662578436965204e-06,
"loss": 0.1672,
"step": 2140
},
{
"epoch": 0.858883451513033,
"grad_norm": 2.3754403591156006,
"learning_rate": 7.648317170564747e-06,
"loss": 0.1344,
"step": 2150
},
{
"epoch": 0.8628782582642565,
"grad_norm": 3.58271861076355,
"learning_rate": 7.63405590416429e-06,
"loss": 0.1707,
"step": 2160
},
{
"epoch": 0.8668730650154799,
"grad_norm": 3.560786247253418,
"learning_rate": 7.619794637763834e-06,
"loss": 0.1716,
"step": 2170
},
{
"epoch": 0.8708678717667033,
"grad_norm": 3.7507925033569336,
"learning_rate": 7.6055333713633774e-06,
"loss": 0.1385,
"step": 2180
},
{
"epoch": 0.8748626785179267,
"grad_norm": 3.3080689907073975,
"learning_rate": 7.5912721049629215e-06,
"loss": 0.1409,
"step": 2190
},
{
"epoch": 0.8788574852691501,
"grad_norm": 2.7758231163024902,
"learning_rate": 7.577010838562466e-06,
"loss": 0.1389,
"step": 2200
},
{
"epoch": 0.8828522920203735,
"grad_norm": 3.5948941707611084,
"learning_rate": 7.562749572162008e-06,
"loss": 0.1322,
"step": 2210
},
{
"epoch": 0.886847098771597,
"grad_norm": 3.502706527709961,
"learning_rate": 7.548488305761552e-06,
"loss": 0.149,
"step": 2220
},
{
"epoch": 0.8908419055228204,
"grad_norm": 3.414030075073242,
"learning_rate": 7.534227039361096e-06,
"loss": 0.1545,
"step": 2230
},
{
"epoch": 0.8948367122740437,
"grad_norm": 3.6713764667510986,
"learning_rate": 7.519965772960639e-06,
"loss": 0.1423,
"step": 2240
},
{
"epoch": 0.8988315190252671,
"grad_norm": 3.5644688606262207,
"learning_rate": 7.505704506560183e-06,
"loss": 0.1738,
"step": 2250
},
{
"epoch": 0.9028263257764906,
"grad_norm": 3.30999755859375,
"learning_rate": 7.491443240159727e-06,
"loss": 0.1751,
"step": 2260
},
{
"epoch": 0.906821132527714,
"grad_norm": 3.0740654468536377,
"learning_rate": 7.47718197375927e-06,
"loss": 0.1293,
"step": 2270
},
{
"epoch": 0.9108159392789373,
"grad_norm": 2.7236621379852295,
"learning_rate": 7.462920707358814e-06,
"loss": 0.1669,
"step": 2280
},
{
"epoch": 0.9148107460301608,
"grad_norm": 3.11482310295105,
"learning_rate": 7.448659440958358e-06,
"loss": 0.1239,
"step": 2290
},
{
"epoch": 0.9188055527813842,
"grad_norm": 3.0215625762939453,
"learning_rate": 7.434398174557901e-06,
"loss": 0.1527,
"step": 2300
},
{
"epoch": 0.9228003595326076,
"grad_norm": 3.896244764328003,
"learning_rate": 7.420136908157445e-06,
"loss": 0.1498,
"step": 2310
},
{
"epoch": 0.9267951662838311,
"grad_norm": 4.210872173309326,
"learning_rate": 7.405875641756989e-06,
"loss": 0.1521,
"step": 2320
},
{
"epoch": 0.9307899730350544,
"grad_norm": 4.199804306030273,
"learning_rate": 7.391614375356533e-06,
"loss": 0.1363,
"step": 2330
},
{
"epoch": 0.9347847797862778,
"grad_norm": 3.445571184158325,
"learning_rate": 7.377353108956075e-06,
"loss": 0.141,
"step": 2340
},
{
"epoch": 0.9387795865375013,
"grad_norm": 3.0716307163238525,
"learning_rate": 7.3630918425556194e-06,
"loss": 0.1502,
"step": 2350
},
{
"epoch": 0.9427743932887247,
"grad_norm": 3.4230117797851562,
"learning_rate": 7.3488305761551635e-06,
"loss": 0.1544,
"step": 2360
},
{
"epoch": 0.946769200039948,
"grad_norm": 3.4998185634613037,
"learning_rate": 7.334569309754707e-06,
"loss": 0.1414,
"step": 2370
},
{
"epoch": 0.9507640067911715,
"grad_norm": 3.726682186126709,
"learning_rate": 7.320308043354251e-06,
"loss": 0.1272,
"step": 2380
},
{
"epoch": 0.9547588135423949,
"grad_norm": 3.086432456970215,
"learning_rate": 7.306046776953795e-06,
"loss": 0.1373,
"step": 2390
},
{
"epoch": 0.9587536202936183,
"grad_norm": 3.308708667755127,
"learning_rate": 7.291785510553337e-06,
"loss": 0.1434,
"step": 2400
},
{
"epoch": 0.9627484270448418,
"grad_norm": 2.7734992504119873,
"learning_rate": 7.2775242441528815e-06,
"loss": 0.1575,
"step": 2410
},
{
"epoch": 0.9667432337960651,
"grad_norm": 2.2432265281677246,
"learning_rate": 7.2632629777524256e-06,
"loss": 0.1208,
"step": 2420
},
{
"epoch": 0.9707380405472885,
"grad_norm": 3.2374532222747803,
"learning_rate": 7.249001711351969e-06,
"loss": 0.1366,
"step": 2430
},
{
"epoch": 0.974732847298512,
"grad_norm": 2.740648031234741,
"learning_rate": 7.234740444951512e-06,
"loss": 0.1514,
"step": 2440
},
{
"epoch": 0.9787276540497354,
"grad_norm": 3.543654441833496,
"learning_rate": 7.220479178551056e-06,
"loss": 0.1261,
"step": 2450
},
{
"epoch": 0.9827224608009587,
"grad_norm": 3.6213510036468506,
"learning_rate": 7.206217912150599e-06,
"loss": 0.1482,
"step": 2460
},
{
"epoch": 0.9867172675521821,
"grad_norm": 2.9423608779907227,
"learning_rate": 7.1919566457501435e-06,
"loss": 0.158,
"step": 2470
},
{
"epoch": 0.9907120743034056,
"grad_norm": 5.1055145263671875,
"learning_rate": 7.1776953793496876e-06,
"loss": 0.1245,
"step": 2480
},
{
"epoch": 0.994706881054629,
"grad_norm": 3.547549247741699,
"learning_rate": 7.16343411294923e-06,
"loss": 0.1582,
"step": 2490
},
{
"epoch": 0.9987016878058524,
"grad_norm": 2.7595136165618896,
"learning_rate": 7.149172846548774e-06,
"loss": 0.1446,
"step": 2500
},
{
"epoch": 0.9987016878058524,
"eval_loss": 0.08971710503101349,
"eval_runtime": 11133.1611,
"eval_samples_per_second": 1.799,
"eval_steps_per_second": 0.225,
"eval_wer": 8.384208161162686,
"step": 2500
}
],
"logging_steps": 10,
"max_steps": 7512,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 2500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 8.16483926016e+19,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}