{
"best_global_step": 4000,
"best_metric": 13.268986926675701,
"best_model_checkpoint": "./SALAMA_NEWMED1/checkpoint-4000",
"epoch": 1.597623089983022,
"eval_steps": 2000,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00399480675122341,
"grad_norm": 29.325483322143555,
"learning_rate": 1.8e-07,
"loss": 3.9137,
"step": 10
},
{
"epoch": 0.00798961350244682,
"grad_norm": 53.530357360839844,
"learning_rate": 3.8e-07,
"loss": 3.8712,
"step": 20
},
{
"epoch": 0.011984420253670228,
"grad_norm": 36.358612060546875,
"learning_rate": 5.800000000000001e-07,
"loss": 3.0878,
"step": 30
},
{
"epoch": 0.01597922700489364,
"grad_norm": 16.489139556884766,
"learning_rate": 7.8e-07,
"loss": 2.7678,
"step": 40
},
{
"epoch": 0.019974033756117046,
"grad_norm": 12.16690444946289,
"learning_rate": 9.800000000000001e-07,
"loss": 2.3664,
"step": 50
},
{
"epoch": 0.023968840507340456,
"grad_norm": 12.258421897888184,
"learning_rate": 1.1800000000000001e-06,
"loss": 2.1087,
"step": 60
},
{
"epoch": 0.027963647258563866,
"grad_norm": 10.25462818145752,
"learning_rate": 1.3800000000000001e-06,
"loss": 1.8865,
"step": 70
},
{
"epoch": 0.03195845400978728,
"grad_norm": 10.944440841674805,
"learning_rate": 1.5800000000000001e-06,
"loss": 1.7902,
"step": 80
},
{
"epoch": 0.035953260761010686,
"grad_norm": 10.586995124816895,
"learning_rate": 1.7800000000000001e-06,
"loss": 1.6421,
"step": 90
},
{
"epoch": 0.03994806751223409,
"grad_norm": 10.034771919250488,
"learning_rate": 1.98e-06,
"loss": 1.5449,
"step": 100
},
{
"epoch": 0.043942874263457506,
"grad_norm": 10.443981170654297,
"learning_rate": 2.1800000000000003e-06,
"loss": 1.4428,
"step": 110
},
{
"epoch": 0.04793768101468091,
"grad_norm": 9.427363395690918,
"learning_rate": 2.38e-06,
"loss": 1.2572,
"step": 120
},
{
"epoch": 0.051932487765904325,
"grad_norm": 10.59061050415039,
"learning_rate": 2.5800000000000003e-06,
"loss": 1.072,
"step": 130
},
{
"epoch": 0.05592729451712773,
"grad_norm": 7.301783084869385,
"learning_rate": 2.7800000000000005e-06,
"loss": 0.7508,
"step": 140
},
{
"epoch": 0.059922101268351145,
"grad_norm": 8.623008728027344,
"learning_rate": 2.9800000000000003e-06,
"loss": 0.7196,
"step": 150
},
{
"epoch": 0.06391690801957456,
"grad_norm": 8.511634826660156,
"learning_rate": 3.1800000000000005e-06,
"loss": 0.6404,
"step": 160
},
{
"epoch": 0.06791171477079797,
"grad_norm": 8.052703857421875,
"learning_rate": 3.3800000000000007e-06,
"loss": 0.6112,
"step": 170
},
{
"epoch": 0.07190652152202137,
"grad_norm": 7.108290672302246,
"learning_rate": 3.58e-06,
"loss": 0.6507,
"step": 180
},
{
"epoch": 0.07590132827324478,
"grad_norm": 7.996196269989014,
"learning_rate": 3.7800000000000002e-06,
"loss": 0.6163,
"step": 190
},
{
"epoch": 0.07989613502446818,
"grad_norm": 6.643161773681641,
"learning_rate": 3.980000000000001e-06,
"loss": 0.5806,
"step": 200
},
{
"epoch": 0.0838909417756916,
"grad_norm": 7.640316009521484,
"learning_rate": 4.18e-06,
"loss": 0.5503,
"step": 210
},
{
"epoch": 0.08788574852691501,
"grad_norm": 7.485447883605957,
"learning_rate": 4.38e-06,
"loss": 0.5672,
"step": 220
},
{
"epoch": 0.09188055527813842,
"grad_norm": 7.693063735961914,
"learning_rate": 4.58e-06,
"loss": 0.5593,
"step": 230
},
{
"epoch": 0.09587536202936182,
"grad_norm": 7.907495498657227,
"learning_rate": 4.78e-06,
"loss": 0.5122,
"step": 240
},
{
"epoch": 0.09987016878058524,
"grad_norm": 6.673120021820068,
"learning_rate": 4.980000000000001e-06,
"loss": 0.5111,
"step": 250
},
{
"epoch": 0.10386497553180865,
"grad_norm": 6.849791049957275,
"learning_rate": 5.18e-06,
"loss": 0.5122,
"step": 260
},
{
"epoch": 0.10785978228303206,
"grad_norm": 6.444810390472412,
"learning_rate": 5.380000000000001e-06,
"loss": 0.4601,
"step": 270
},
{
"epoch": 0.11185458903425546,
"grad_norm": 5.884876728057861,
"learning_rate": 5.580000000000001e-06,
"loss": 0.4452,
"step": 280
},
{
"epoch": 0.11584939578547888,
"grad_norm": 5.974656105041504,
"learning_rate": 5.78e-06,
"loss": 0.495,
"step": 290
},
{
"epoch": 0.11984420253670229,
"grad_norm": 5.884718418121338,
"learning_rate": 5.98e-06,
"loss": 0.4276,
"step": 300
},
{
"epoch": 0.1238390092879257,
"grad_norm": 6.197109699249268,
"learning_rate": 6.18e-06,
"loss": 0.4378,
"step": 310
},
{
"epoch": 0.12783381603914912,
"grad_norm": 6.656605243682861,
"learning_rate": 6.380000000000001e-06,
"loss": 0.4352,
"step": 320
},
{
"epoch": 0.13182862279037252,
"grad_norm": 6.549432277679443,
"learning_rate": 6.5800000000000005e-06,
"loss": 0.4335,
"step": 330
},
{
"epoch": 0.13582342954159593,
"grad_norm": 5.669299125671387,
"learning_rate": 6.780000000000001e-06,
"loss": 0.4776,
"step": 340
},
{
"epoch": 0.13981823629281934,
"grad_norm": 5.265035152435303,
"learning_rate": 6.98e-06,
"loss": 0.4493,
"step": 350
},
{
"epoch": 0.14381304304404274,
"grad_norm": 6.127915382385254,
"learning_rate": 7.180000000000001e-06,
"loss": 0.4505,
"step": 360
},
{
"epoch": 0.14780784979526615,
"grad_norm": 6.895223617553711,
"learning_rate": 7.3800000000000005e-06,
"loss": 0.4369,
"step": 370
},
{
"epoch": 0.15180265654648956,
"grad_norm": 5.936121940612793,
"learning_rate": 7.58e-06,
"loss": 0.4212,
"step": 380
},
{
"epoch": 0.15579746329771296,
"grad_norm": 5.121963977813721,
"learning_rate": 7.78e-06,
"loss": 0.3993,
"step": 390
},
{
"epoch": 0.15979227004893637,
"grad_norm": 6.143499851226807,
"learning_rate": 7.980000000000002e-06,
"loss": 0.4361,
"step": 400
},
{
"epoch": 0.1637870768001598,
"grad_norm": 5.8908491134643555,
"learning_rate": 8.18e-06,
"loss": 0.4003,
"step": 410
},
{
"epoch": 0.1677818835513832,
"grad_norm": 5.114202499389648,
"learning_rate": 8.380000000000001e-06,
"loss": 0.3558,
"step": 420
},
{
"epoch": 0.17177669030260662,
"grad_norm": 5.836564540863037,
"learning_rate": 8.580000000000001e-06,
"loss": 0.3742,
"step": 430
},
{
"epoch": 0.17577149705383002,
"grad_norm": 5.583286285400391,
"learning_rate": 8.78e-06,
"loss": 0.4112,
"step": 440
},
{
"epoch": 0.17976630380505343,
"grad_norm": 6.04997444152832,
"learning_rate": 8.98e-06,
"loss": 0.3803,
"step": 450
},
{
"epoch": 0.18376111055627684,
"grad_norm": 5.244260787963867,
"learning_rate": 9.180000000000002e-06,
"loss": 0.4266,
"step": 460
},
{
"epoch": 0.18775591730750024,
"grad_norm": 5.963865756988525,
"learning_rate": 9.38e-06,
"loss": 0.3907,
"step": 470
},
{
"epoch": 0.19175072405872365,
"grad_norm": 5.7191057205200195,
"learning_rate": 9.58e-06,
"loss": 0.3676,
"step": 480
},
{
"epoch": 0.19574553080994708,
"grad_norm": 5.471884727478027,
"learning_rate": 9.780000000000001e-06,
"loss": 0.3764,
"step": 490
},
{
"epoch": 0.1997403375611705,
"grad_norm": 6.182490825653076,
"learning_rate": 9.980000000000001e-06,
"loss": 0.3758,
"step": 500
},
{
"epoch": 0.2037351443123939,
"grad_norm": 4.735085487365723,
"learning_rate": 9.98716486023959e-06,
"loss": 0.3518,
"step": 510
},
{
"epoch": 0.2077299510636173,
"grad_norm": 5.506002426147461,
"learning_rate": 9.972903593839133e-06,
"loss": 0.3887,
"step": 520
},
{
"epoch": 0.2117247578148407,
"grad_norm": 5.19524621963501,
"learning_rate": 9.958642327438678e-06,
"loss": 0.3979,
"step": 530
},
{
"epoch": 0.21571956456606411,
"grad_norm": 6.315662860870361,
"learning_rate": 9.944381061038221e-06,
"loss": 0.3847,
"step": 540
},
{
"epoch": 0.21971437131728752,
"grad_norm": 5.95819616317749,
"learning_rate": 9.930119794637765e-06,
"loss": 0.3516,
"step": 550
},
{
"epoch": 0.22370917806851093,
"grad_norm": 5.994788646697998,
"learning_rate": 9.91585852823731e-06,
"loss": 0.346,
"step": 560
},
{
"epoch": 0.22770398481973433,
"grad_norm": 4.4159369468688965,
"learning_rate": 9.901597261836851e-06,
"loss": 0.3584,
"step": 570
},
{
"epoch": 0.23169879157095777,
"grad_norm": 4.532053470611572,
"learning_rate": 9.887335995436396e-06,
"loss": 0.4037,
"step": 580
},
{
"epoch": 0.23569359832218117,
"grad_norm": 5.412591457366943,
"learning_rate": 9.87307472903594e-06,
"loss": 0.3547,
"step": 590
},
{
"epoch": 0.23968840507340458,
"grad_norm": 5.319347381591797,
"learning_rate": 9.858813462635483e-06,
"loss": 0.3619,
"step": 600
},
{
"epoch": 0.243683211824628,
"grad_norm": 4.947774887084961,
"learning_rate": 9.844552196235026e-06,
"loss": 0.3764,
"step": 610
},
{
"epoch": 0.2476780185758514,
"grad_norm": 4.8416666984558105,
"learning_rate": 9.83029092983457e-06,
"loss": 0.3073,
"step": 620
},
{
"epoch": 0.2516728253270748,
"grad_norm": 6.026658535003662,
"learning_rate": 9.816029663434114e-06,
"loss": 0.3709,
"step": 630
},
{
"epoch": 0.25566763207829823,
"grad_norm": 6.480794429779053,
"learning_rate": 9.801768397033657e-06,
"loss": 0.3131,
"step": 640
},
{
"epoch": 0.2596624388295216,
"grad_norm": 5.907824516296387,
"learning_rate": 9.787507130633202e-06,
"loss": 0.3447,
"step": 650
},
{
"epoch": 0.26365724558074505,
"grad_norm": 4.916101932525635,
"learning_rate": 9.773245864232744e-06,
"loss": 0.3436,
"step": 660
},
{
"epoch": 0.2676520523319684,
"grad_norm": 5.599504470825195,
"learning_rate": 9.758984597832289e-06,
"loss": 0.3236,
"step": 670
},
{
"epoch": 0.27164685908319186,
"grad_norm": 6.6841654777526855,
"learning_rate": 9.744723331431832e-06,
"loss": 0.3491,
"step": 680
},
{
"epoch": 0.27564166583441524,
"grad_norm": 6.430059909820557,
"learning_rate": 9.730462065031375e-06,
"loss": 0.3343,
"step": 690
},
{
"epoch": 0.2796364725856387,
"grad_norm": 5.829887866973877,
"learning_rate": 9.71620079863092e-06,
"loss": 0.345,
"step": 700
},
{
"epoch": 0.28363127933686205,
"grad_norm": 4.85678243637085,
"learning_rate": 9.701939532230463e-06,
"loss": 0.3756,
"step": 710
},
{
"epoch": 0.2876260860880855,
"grad_norm": 4.557241439819336,
"learning_rate": 9.687678265830007e-06,
"loss": 0.2933,
"step": 720
},
{
"epoch": 0.2916208928393089,
"grad_norm": 5.587825298309326,
"learning_rate": 9.67341699942955e-06,
"loss": 0.3247,
"step": 730
},
{
"epoch": 0.2956156995905323,
"grad_norm": 5.785783290863037,
"learning_rate": 9.659155733029095e-06,
"loss": 0.3489,
"step": 740
},
{
"epoch": 0.29961050634175573,
"grad_norm": 5.360633850097656,
"learning_rate": 9.644894466628636e-06,
"loss": 0.3727,
"step": 750
},
{
"epoch": 0.3036053130929791,
"grad_norm": 5.479579448699951,
"learning_rate": 9.630633200228181e-06,
"loss": 0.3386,
"step": 760
},
{
"epoch": 0.30760011984420255,
"grad_norm": 5.192019462585449,
"learning_rate": 9.616371933827725e-06,
"loss": 0.3268,
"step": 770
},
{
"epoch": 0.3115949265954259,
"grad_norm": 5.177510738372803,
"learning_rate": 9.602110667427268e-06,
"loss": 0.3485,
"step": 780
},
{
"epoch": 0.31558973334664936,
"grad_norm": 4.933345317840576,
"learning_rate": 9.587849401026813e-06,
"loss": 0.3028,
"step": 790
},
{
"epoch": 0.31958454009787274,
"grad_norm": 4.921146869659424,
"learning_rate": 9.573588134626356e-06,
"loss": 0.3348,
"step": 800
},
{
"epoch": 0.32357934684909617,
"grad_norm": 5.784856796264648,
"learning_rate": 9.5593268682259e-06,
"loss": 0.2961,
"step": 810
},
{
"epoch": 0.3275741536003196,
"grad_norm": 5.327091693878174,
"learning_rate": 9.545065601825442e-06,
"loss": 0.2914,
"step": 820
},
{
"epoch": 0.331568960351543,
"grad_norm": 4.885860443115234,
"learning_rate": 9.530804335424987e-06,
"loss": 0.3156,
"step": 830
},
{
"epoch": 0.3355637671027664,
"grad_norm": 5.5808844566345215,
"learning_rate": 9.516543069024529e-06,
"loss": 0.3452,
"step": 840
},
{
"epoch": 0.3395585738539898,
"grad_norm": 5.16964054107666,
"learning_rate": 9.502281802624074e-06,
"loss": 0.2737,
"step": 850
},
{
"epoch": 0.34355338060521323,
"grad_norm": 4.464290142059326,
"learning_rate": 9.488020536223617e-06,
"loss": 0.2717,
"step": 860
},
{
"epoch": 0.3475481873564366,
"grad_norm": 4.1654791831970215,
"learning_rate": 9.47375926982316e-06,
"loss": 0.2875,
"step": 870
},
{
"epoch": 0.35154299410766005,
"grad_norm": 5.758086681365967,
"learning_rate": 9.459498003422705e-06,
"loss": 0.2947,
"step": 880
},
{
"epoch": 0.3555378008588834,
"grad_norm": 4.330506801605225,
"learning_rate": 9.445236737022249e-06,
"loss": 0.2957,
"step": 890
},
{
"epoch": 0.35953260761010686,
"grad_norm": 4.616450786590576,
"learning_rate": 9.430975470621792e-06,
"loss": 0.2752,
"step": 900
},
{
"epoch": 0.3635274143613303,
"grad_norm": 4.364641189575195,
"learning_rate": 9.416714204221335e-06,
"loss": 0.33,
"step": 910
},
{
"epoch": 0.36752222111255367,
"grad_norm": 4.513298034667969,
"learning_rate": 9.40245293782088e-06,
"loss": 0.3098,
"step": 920
},
{
"epoch": 0.3715170278637771,
"grad_norm": 5.608431816101074,
"learning_rate": 9.388191671420423e-06,
"loss": 0.2962,
"step": 930
},
{
"epoch": 0.3755118346150005,
"grad_norm": 3.523578405380249,
"learning_rate": 9.373930405019966e-06,
"loss": 0.2754,
"step": 940
},
{
"epoch": 0.3795066413662239,
"grad_norm": 5.222875595092773,
"learning_rate": 9.35966913861951e-06,
"loss": 0.2875,
"step": 950
},
{
"epoch": 0.3835014481174473,
"grad_norm": 5.467960357666016,
"learning_rate": 9.345407872219053e-06,
"loss": 0.3089,
"step": 960
},
{
"epoch": 0.38749625486867073,
"grad_norm": 4.540034770965576,
"learning_rate": 9.331146605818598e-06,
"loss": 0.2808,
"step": 970
},
{
"epoch": 0.39149106161989417,
"grad_norm": 4.5449628829956055,
"learning_rate": 9.316885339418141e-06,
"loss": 0.3063,
"step": 980
},
{
"epoch": 0.39548586837111754,
"grad_norm": 5.014548301696777,
"learning_rate": 9.302624073017684e-06,
"loss": 0.2795,
"step": 990
},
{
"epoch": 0.399480675122341,
"grad_norm": 4.645490646362305,
"learning_rate": 9.288362806617228e-06,
"loss": 0.2749,
"step": 1000
},
{
"epoch": 0.40347548187356436,
"grad_norm": 4.577834606170654,
"learning_rate": 9.274101540216773e-06,
"loss": 0.2722,
"step": 1010
},
{
"epoch": 0.4074702886247878,
"grad_norm": 3.580354928970337,
"learning_rate": 9.259840273816316e-06,
"loss": 0.2543,
"step": 1020
},
{
"epoch": 0.41146509537601117,
"grad_norm": 4.645276069641113,
"learning_rate": 9.245579007415859e-06,
"loss": 0.2703,
"step": 1030
},
{
"epoch": 0.4154599021272346,
"grad_norm": 4.813560485839844,
"learning_rate": 9.231317741015402e-06,
"loss": 0.2708,
"step": 1040
},
{
"epoch": 0.419454708878458,
"grad_norm": 4.8925371170043945,
"learning_rate": 9.217056474614946e-06,
"loss": 0.2894,
"step": 1050
},
{
"epoch": 0.4234495156296814,
"grad_norm": 4.675732135772705,
"learning_rate": 9.20279520821449e-06,
"loss": 0.2692,
"step": 1060
},
{
"epoch": 0.42744432238090485,
"grad_norm": 4.893064975738525,
"learning_rate": 9.188533941814034e-06,
"loss": 0.3143,
"step": 1070
},
{
"epoch": 0.43143912913212823,
"grad_norm": 4.4499592781066895,
"learning_rate": 9.174272675413579e-06,
"loss": 0.2846,
"step": 1080
},
{
"epoch": 0.43543393588335166,
"grad_norm": 5.22846794128418,
"learning_rate": 9.16001140901312e-06,
"loss": 0.2384,
"step": 1090
},
{
"epoch": 0.43942874263457504,
"grad_norm": 4.598273277282715,
"learning_rate": 9.145750142612665e-06,
"loss": 0.2798,
"step": 1100
},
{
"epoch": 0.4434235493857985,
"grad_norm": 4.458007335662842,
"learning_rate": 9.131488876212208e-06,
"loss": 0.2677,
"step": 1110
},
{
"epoch": 0.44741835613702186,
"grad_norm": 4.530386447906494,
"learning_rate": 9.117227609811752e-06,
"loss": 0.2985,
"step": 1120
},
{
"epoch": 0.4514131628882453,
"grad_norm": 4.605809688568115,
"learning_rate": 9.102966343411297e-06,
"loss": 0.2862,
"step": 1130
},
{
"epoch": 0.45540796963946867,
"grad_norm": 4.465124607086182,
"learning_rate": 9.08870507701084e-06,
"loss": 0.2603,
"step": 1140
},
{
"epoch": 0.4594027763906921,
"grad_norm": 4.143098831176758,
"learning_rate": 9.074443810610383e-06,
"loss": 0.2777,
"step": 1150
},
{
"epoch": 0.46339758314191554,
"grad_norm": 4.249698162078857,
"learning_rate": 9.060182544209926e-06,
"loss": 0.2545,
"step": 1160
},
{
"epoch": 0.4673923898931389,
"grad_norm": 5.094964027404785,
"learning_rate": 9.045921277809471e-06,
"loss": 0.2644,
"step": 1170
},
{
"epoch": 0.47138719664436235,
"grad_norm": 4.91431999206543,
"learning_rate": 9.031660011409013e-06,
"loss": 0.2718,
"step": 1180
},
{
"epoch": 0.47538200339558573,
"grad_norm": 5.91095495223999,
"learning_rate": 9.017398745008558e-06,
"loss": 0.2519,
"step": 1190
},
{
"epoch": 0.47937681014680916,
"grad_norm": 4.603298187255859,
"learning_rate": 9.003137478608101e-06,
"loss": 0.265,
"step": 1200
},
{
"epoch": 0.48337161689803254,
"grad_norm": 4.797214031219482,
"learning_rate": 8.988876212207644e-06,
"loss": 0.2407,
"step": 1210
},
{
"epoch": 0.487366423649256,
"grad_norm": 4.030250072479248,
"learning_rate": 8.97461494580719e-06,
"loss": 0.268,
"step": 1220
},
{
"epoch": 0.49136123040047935,
"grad_norm": 5.035445213317871,
"learning_rate": 8.960353679406733e-06,
"loss": 0.2636,
"step": 1230
},
{
"epoch": 0.4953560371517028,
"grad_norm": 4.499198913574219,
"learning_rate": 8.946092413006276e-06,
"loss": 0.2651,
"step": 1240
},
{
"epoch": 0.4993508439029262,
"grad_norm": 3.754934310913086,
"learning_rate": 8.931831146605819e-06,
"loss": 0.2537,
"step": 1250
},
{
"epoch": 0.5033456506541496,
"grad_norm": 4.640712738037109,
"learning_rate": 8.917569880205364e-06,
"loss": 0.2713,
"step": 1260
},
{
"epoch": 0.507340457405373,
"grad_norm": 4.7820305824279785,
"learning_rate": 8.903308613804906e-06,
"loss": 0.2554,
"step": 1270
},
{
"epoch": 0.5113352641565965,
"grad_norm": 5.130531311035156,
"learning_rate": 8.88904734740445e-06,
"loss": 0.2607,
"step": 1280
},
{
"epoch": 0.5153300709078198,
"grad_norm": 4.392201900482178,
"learning_rate": 8.874786081003994e-06,
"loss": 0.2445,
"step": 1290
},
{
"epoch": 0.5193248776590432,
"grad_norm": 3.3310599327087402,
"learning_rate": 8.860524814603537e-06,
"loss": 0.2355,
"step": 1300
},
{
"epoch": 0.5233196844102667,
"grad_norm": 3.532226800918579,
"learning_rate": 8.846263548203082e-06,
"loss": 0.2255,
"step": 1310
},
{
"epoch": 0.5273144911614901,
"grad_norm": 4.452306270599365,
"learning_rate": 8.832002281802625e-06,
"loss": 0.2671,
"step": 1320
},
{
"epoch": 0.5313092979127134,
"grad_norm": 3.8220486640930176,
"learning_rate": 8.817741015402168e-06,
"loss": 0.2731,
"step": 1330
},
{
"epoch": 0.5353041046639369,
"grad_norm": 4.6910319328308105,
"learning_rate": 8.803479749001712e-06,
"loss": 0.2617,
"step": 1340
},
{
"epoch": 0.5392989114151603,
"grad_norm": 4.17935848236084,
"learning_rate": 8.789218482601257e-06,
"loss": 0.2433,
"step": 1350
},
{
"epoch": 0.5432937181663837,
"grad_norm": 5.38765811920166,
"learning_rate": 8.7749572162008e-06,
"loss": 0.2466,
"step": 1360
},
{
"epoch": 0.5472885249176072,
"grad_norm": 4.427072525024414,
"learning_rate": 8.760695949800343e-06,
"loss": 0.2465,
"step": 1370
},
{
"epoch": 0.5512833316688305,
"grad_norm": 5.366933345794678,
"learning_rate": 8.746434683399886e-06,
"loss": 0.2568,
"step": 1380
},
{
"epoch": 0.5552781384200539,
"grad_norm": 4.636244773864746,
"learning_rate": 8.73217341699943e-06,
"loss": 0.2652,
"step": 1390
},
{
"epoch": 0.5592729451712773,
"grad_norm": 3.3288471698760986,
"learning_rate": 8.717912150598975e-06,
"loss": 0.2183,
"step": 1400
},
{
"epoch": 0.5632677519225008,
"grad_norm": 5.008598327636719,
"learning_rate": 8.703650884198518e-06,
"loss": 0.2838,
"step": 1410
},
{
"epoch": 0.5672625586737241,
"grad_norm": 5.04400110244751,
"learning_rate": 8.689389617798061e-06,
"loss": 0.2521,
"step": 1420
},
{
"epoch": 0.5712573654249475,
"grad_norm": 3.676771640777588,
"learning_rate": 8.675128351397604e-06,
"loss": 0.2235,
"step": 1430
},
{
"epoch": 0.575252172176171,
"grad_norm": 3.988940715789795,
"learning_rate": 8.66086708499715e-06,
"loss": 0.2561,
"step": 1440
},
{
"epoch": 0.5792469789273944,
"grad_norm": 3.753837823867798,
"learning_rate": 8.646605818596692e-06,
"loss": 0.2349,
"step": 1450
},
{
"epoch": 0.5832417856786178,
"grad_norm": 5.020956993103027,
"learning_rate": 8.632344552196236e-06,
"loss": 0.2605,
"step": 1460
},
{
"epoch": 0.5872365924298412,
"grad_norm": 4.19816780090332,
"learning_rate": 8.618083285795779e-06,
"loss": 0.2229,
"step": 1470
},
{
"epoch": 0.5912313991810646,
"grad_norm": 3.8098461627960205,
"learning_rate": 8.603822019395322e-06,
"loss": 0.2286,
"step": 1480
},
{
"epoch": 0.595226205932288,
"grad_norm": 4.122703552246094,
"learning_rate": 8.589560752994867e-06,
"loss": 0.2257,
"step": 1490
},
{
"epoch": 0.5992210126835115,
"grad_norm": 5.137128829956055,
"learning_rate": 8.57529948659441e-06,
"loss": 0.2531,
"step": 1500
},
{
"epoch": 0.6032158194347348,
"grad_norm": 4.318984508514404,
"learning_rate": 8.561038220193954e-06,
"loss": 0.2384,
"step": 1510
},
{
"epoch": 0.6072106261859582,
"grad_norm": 3.5681166648864746,
"learning_rate": 8.546776953793497e-06,
"loss": 0.282,
"step": 1520
},
{
"epoch": 0.6112054329371817,
"grad_norm": 2.9932525157928467,
"learning_rate": 8.532515687393042e-06,
"loss": 0.2095,
"step": 1530
},
{
"epoch": 0.6152002396884051,
"grad_norm": 3.375771999359131,
"learning_rate": 8.518254420992585e-06,
"loss": 0.2333,
"step": 1540
},
{
"epoch": 0.6191950464396285,
"grad_norm": 4.841177940368652,
"learning_rate": 8.503993154592128e-06,
"loss": 0.2295,
"step": 1550
},
{
"epoch": 0.6231898531908519,
"grad_norm": 4.5939717292785645,
"learning_rate": 8.489731888191672e-06,
"loss": 0.2494,
"step": 1560
},
{
"epoch": 0.6271846599420753,
"grad_norm": 3.88786244392395,
"learning_rate": 8.475470621791215e-06,
"loss": 0.2324,
"step": 1570
},
{
"epoch": 0.6311794666932987,
"grad_norm": 3.3802106380462646,
"learning_rate": 8.46120935539076e-06,
"loss": 0.2413,
"step": 1580
},
{
"epoch": 0.6351742734445222,
"grad_norm": 4.619571685791016,
"learning_rate": 8.446948088990303e-06,
"loss": 0.2165,
"step": 1590
},
{
"epoch": 0.6391690801957455,
"grad_norm": 4.9018874168396,
"learning_rate": 8.432686822589846e-06,
"loss": 0.2496,
"step": 1600
},
{
"epoch": 0.6431638869469689,
"grad_norm": 4.832056522369385,
"learning_rate": 8.41842555618939e-06,
"loss": 0.1857,
"step": 1610
},
{
"epoch": 0.6471586936981923,
"grad_norm": 4.44606351852417,
"learning_rate": 8.404164289788934e-06,
"loss": 0.2469,
"step": 1620
},
{
"epoch": 0.6511535004494158,
"grad_norm": 4.6510138511657715,
"learning_rate": 8.389903023388478e-06,
"loss": 0.2419,
"step": 1630
},
{
"epoch": 0.6551483072006392,
"grad_norm": 4.438849925994873,
"learning_rate": 8.375641756988021e-06,
"loss": 0.2488,
"step": 1640
},
{
"epoch": 0.6591431139518625,
"grad_norm": 4.8355889320373535,
"learning_rate": 8.361380490587566e-06,
"loss": 0.2535,
"step": 1650
},
{
"epoch": 0.663137920703086,
"grad_norm": 3.961348533630371,
"learning_rate": 8.347119224187107e-06,
"loss": 0.2338,
"step": 1660
},
{
"epoch": 0.6671327274543094,
"grad_norm": 4.276237964630127,
"learning_rate": 8.332857957786652e-06,
"loss": 0.2417,
"step": 1670
},
{
"epoch": 0.6711275342055328,
"grad_norm": 4.317446231842041,
"learning_rate": 8.318596691386196e-06,
"loss": 0.215,
"step": 1680
},
{
"epoch": 0.6751223409567562,
"grad_norm": 4.8514018058776855,
"learning_rate": 8.304335424985739e-06,
"loss": 0.2462,
"step": 1690
},
{
"epoch": 0.6791171477079796,
"grad_norm": 5.050139427185059,
"learning_rate": 8.290074158585282e-06,
"loss": 0.2228,
"step": 1700
},
{
"epoch": 0.683111954459203,
"grad_norm": 3.6597845554351807,
"learning_rate": 8.275812892184827e-06,
"loss": 0.2337,
"step": 1710
},
{
"epoch": 0.6871067612104265,
"grad_norm": 3.3534600734710693,
"learning_rate": 8.26155162578437e-06,
"loss": 0.2402,
"step": 1720
},
{
"epoch": 0.6911015679616499,
"grad_norm": 3.987107276916504,
"learning_rate": 8.247290359383914e-06,
"loss": 0.2154,
"step": 1730
},
{
"epoch": 0.6950963747128732,
"grad_norm": 3.4701499938964844,
"learning_rate": 8.233029092983458e-06,
"loss": 0.2446,
"step": 1740
},
{
"epoch": 0.6990911814640967,
"grad_norm": 3.479576826095581,
"learning_rate": 8.218767826583002e-06,
"loss": 0.2138,
"step": 1750
},
{
"epoch": 0.7030859882153201,
"grad_norm": 4.3463358879089355,
"learning_rate": 8.204506560182545e-06,
"loss": 0.2202,
"step": 1760
},
{
"epoch": 0.7070807949665435,
"grad_norm": 3.920137882232666,
"learning_rate": 8.190245293782088e-06,
"loss": 0.2184,
"step": 1770
},
{
"epoch": 0.7110756017177668,
"grad_norm": 4.3204121589660645,
"learning_rate": 8.175984027381633e-06,
"loss": 0.2467,
"step": 1780
},
{
"epoch": 0.7150704084689903,
"grad_norm": 4.039252758026123,
"learning_rate": 8.161722760981175e-06,
"loss": 0.2238,
"step": 1790
},
{
"epoch": 0.7190652152202137,
"grad_norm": 4.227252006530762,
"learning_rate": 8.14746149458072e-06,
"loss": 0.2023,
"step": 1800
},
{
"epoch": 0.7230600219714372,
"grad_norm": 2.9367706775665283,
"learning_rate": 8.133200228180263e-06,
"loss": 0.1977,
"step": 1810
},
{
"epoch": 0.7270548287226606,
"grad_norm": 4.3969831466674805,
"learning_rate": 8.118938961779806e-06,
"loss": 0.2494,
"step": 1820
},
{
"epoch": 0.7310496354738839,
"grad_norm": 3.7488110065460205,
"learning_rate": 8.104677695379351e-06,
"loss": 0.2295,
"step": 1830
},
{
"epoch": 0.7350444422251073,
"grad_norm": 5.048527717590332,
"learning_rate": 8.090416428978894e-06,
"loss": 0.2524,
"step": 1840
},
{
"epoch": 0.7390392489763308,
"grad_norm": 4.360014915466309,
"learning_rate": 8.076155162578438e-06,
"loss": 0.21,
"step": 1850
},
{
"epoch": 0.7430340557275542,
"grad_norm": 4.142049789428711,
"learning_rate": 8.06189389617798e-06,
"loss": 0.2198,
"step": 1860
},
{
"epoch": 0.7470288624787776,
"grad_norm": 3.6296193599700928,
"learning_rate": 8.047632629777526e-06,
"loss": 0.2239,
"step": 1870
},
{
"epoch": 0.751023669230001,
"grad_norm": 4.87466287612915,
"learning_rate": 8.033371363377069e-06,
"loss": 0.2253,
"step": 1880
},
{
"epoch": 0.7550184759812244,
"grad_norm": 3.6398117542266846,
"learning_rate": 8.019110096976612e-06,
"loss": 0.2325,
"step": 1890
},
{
"epoch": 0.7590132827324478,
"grad_norm": 3.6101794242858887,
"learning_rate": 8.004848830576156e-06,
"loss": 0.1967,
"step": 1900
},
{
"epoch": 0.7630080894836713,
"grad_norm": 4.282483100891113,
"learning_rate": 7.990587564175699e-06,
"loss": 0.2183,
"step": 1910
},
{
"epoch": 0.7670028962348946,
"grad_norm": 4.618693828582764,
"learning_rate": 7.976326297775244e-06,
"loss": 0.2356,
"step": 1920
},
{
"epoch": 0.770997702986118,
"grad_norm": 3.517585277557373,
"learning_rate": 7.962065031374787e-06,
"loss": 0.2346,
"step": 1930
},
{
"epoch": 0.7749925097373415,
"grad_norm": 3.8423843383789062,
"learning_rate": 7.94780376497433e-06,
"loss": 0.2436,
"step": 1940
},
{
"epoch": 0.7789873164885649,
"grad_norm": 4.099793910980225,
"learning_rate": 7.933542498573873e-06,
"loss": 0.2091,
"step": 1950
},
{
"epoch": 0.7829821232397883,
"grad_norm": 4.841848850250244,
"learning_rate": 7.919281232173418e-06,
"loss": 0.2328,
"step": 1960
},
{
"epoch": 0.7869769299910117,
"grad_norm": 4.761816024780273,
"learning_rate": 7.905019965772962e-06,
"loss": 0.2159,
"step": 1970
},
{
"epoch": 0.7909717367422351,
"grad_norm": 4.0117411613464355,
"learning_rate": 7.890758699372505e-06,
"loss": 0.2287,
"step": 1980
},
{
"epoch": 0.7949665434934585,
"grad_norm": 4.176918029785156,
"learning_rate": 7.876497432972048e-06,
"loss": 0.2036,
"step": 1990
},
{
"epoch": 0.798961350244682,
"grad_norm": 4.187548637390137,
"learning_rate": 7.862236166571591e-06,
"loss": 0.2175,
"step": 2000
},
{
"epoch": 0.798961350244682,
"eval_loss": 0.17748136818408966,
"eval_runtime": 8461.8051,
"eval_samples_per_second": 2.367,
"eval_steps_per_second": 0.296,
"eval_wer": 15.578133881334441,
"step": 2000
},
{
"epoch": 0.8029561569959053,
"grad_norm": 8.588675498962402,
"learning_rate": 7.847974900171136e-06,
"loss": 0.2548,
"step": 2010
},
{
"epoch": 0.8069509637471287,
"grad_norm": 4.142924785614014,
"learning_rate": 7.83371363377068e-06,
"loss": 0.2145,
"step": 2020
},
{
"epoch": 0.8109457704983521,
"grad_norm": 4.049276828765869,
"learning_rate": 7.819452367370223e-06,
"loss": 0.2227,
"step": 2030
},
{
"epoch": 0.8149405772495756,
"grad_norm": 4.361568450927734,
"learning_rate": 7.805191100969766e-06,
"loss": 0.2352,
"step": 2040
},
{
"epoch": 0.818935384000799,
"grad_norm": 4.02155876159668,
"learning_rate": 7.790929834569311e-06,
"loss": 0.1892,
"step": 2050
},
{
"epoch": 0.8229301907520223,
"grad_norm": 4.061072826385498,
"learning_rate": 7.776668568168854e-06,
"loss": 0.2219,
"step": 2060
},
{
"epoch": 0.8269249975032458,
"grad_norm": 3.2685811519622803,
"learning_rate": 7.762407301768397e-06,
"loss": 0.2057,
"step": 2070
},
{
"epoch": 0.8309198042544692,
"grad_norm": 4.679708480834961,
"learning_rate": 7.748146035367942e-06,
"loss": 0.2364,
"step": 2080
},
{
"epoch": 0.8349146110056926,
"grad_norm": 3.1732335090637207,
"learning_rate": 7.733884768967484e-06,
"loss": 0.2364,
"step": 2090
},
{
"epoch": 0.838909417756916,
"grad_norm": 4.397407054901123,
"learning_rate": 7.719623502567029e-06,
"loss": 0.2251,
"step": 2100
},
{
"epoch": 0.8429042245081394,
"grad_norm": 3.7484922409057617,
"learning_rate": 7.705362236166572e-06,
"loss": 0.2032,
"step": 2110
},
{
"epoch": 0.8468990312593628,
"grad_norm": 4.67540168762207,
"learning_rate": 7.691100969766115e-06,
"loss": 0.2252,
"step": 2120
},
{
"epoch": 0.8508938380105863,
"grad_norm": 3.038043975830078,
"learning_rate": 7.676839703365659e-06,
"loss": 0.2204,
"step": 2130
},
{
"epoch": 0.8548886447618097,
"grad_norm": 4.430922985076904,
"learning_rate": 7.662578436965204e-06,
"loss": 0.2214,
"step": 2140
},
{
"epoch": 0.858883451513033,
"grad_norm": 3.289482593536377,
"learning_rate": 7.648317170564747e-06,
"loss": 0.2301,
"step": 2150
},
{
"epoch": 0.8628782582642565,
"grad_norm": 3.887826919555664,
"learning_rate": 7.63405590416429e-06,
"loss": 0.2448,
"step": 2160
},
{
"epoch": 0.8668730650154799,
"grad_norm": 4.0688157081604,
"learning_rate": 7.619794637763834e-06,
"loss": 0.1966,
"step": 2170
},
{
"epoch": 0.8708678717667033,
"grad_norm": 3.688675880432129,
"learning_rate": 7.6055333713633774e-06,
"loss": 0.197,
"step": 2180
},
{
"epoch": 0.8748626785179267,
"grad_norm": 3.8685896396636963,
"learning_rate": 7.5912721049629215e-06,
"loss": 0.2144,
"step": 2190
},
{
"epoch": 0.8788574852691501,
"grad_norm": 3.8697168827056885,
"learning_rate": 7.577010838562466e-06,
"loss": 0.21,
"step": 2200
},
{
"epoch": 0.8828522920203735,
"grad_norm": 3.6243553161621094,
"learning_rate": 7.562749572162008e-06,
"loss": 0.2115,
"step": 2210
},
{
"epoch": 0.886847098771597,
"grad_norm": 4.4122443199157715,
"learning_rate": 7.548488305761552e-06,
"loss": 0.2297,
"step": 2220
},
{
"epoch": 0.8908419055228204,
"grad_norm": 3.5293548107147217,
"learning_rate": 7.534227039361096e-06,
"loss": 0.2012,
"step": 2230
},
{
"epoch": 0.8948367122740437,
"grad_norm": 3.237689733505249,
"learning_rate": 7.519965772960639e-06,
"loss": 0.2104,
"step": 2240
},
{
"epoch": 0.8988315190252671,
"grad_norm": 3.9817986488342285,
"learning_rate": 7.505704506560183e-06,
"loss": 0.2178,
"step": 2250
},
{
"epoch": 0.9028263257764906,
"grad_norm": 4.46220064163208,
"learning_rate": 7.491443240159727e-06,
"loss": 0.2214,
"step": 2260
},
{
"epoch": 0.906821132527714,
"grad_norm": 3.8697423934936523,
"learning_rate": 7.47718197375927e-06,
"loss": 0.1978,
"step": 2270
},
{
"epoch": 0.9108159392789373,
"grad_norm": 3.2078206539154053,
"learning_rate": 7.462920707358814e-06,
"loss": 0.2084,
"step": 2280
},
{
"epoch": 0.9148107460301608,
"grad_norm": 4.6600494384765625,
"learning_rate": 7.448659440958358e-06,
"loss": 0.2199,
"step": 2290
},
{
"epoch": 0.9188055527813842,
"grad_norm": 4.262337684631348,
"learning_rate": 7.434398174557901e-06,
"loss": 0.2115,
"step": 2300
},
{
"epoch": 0.9228003595326076,
"grad_norm": 4.174410820007324,
"learning_rate": 7.420136908157445e-06,
"loss": 0.1861,
"step": 2310
},
{
"epoch": 0.9267951662838311,
"grad_norm": 3.7125275135040283,
"learning_rate": 7.405875641756989e-06,
"loss": 0.2176,
"step": 2320
},
{
"epoch": 0.9307899730350544,
"grad_norm": 3.946467161178589,
"learning_rate": 7.391614375356533e-06,
"loss": 0.2119,
"step": 2330
},
{
"epoch": 0.9347847797862778,
"grad_norm": 4.180386066436768,
"learning_rate": 7.377353108956075e-06,
"loss": 0.1986,
"step": 2340
},
{
"epoch": 0.9387795865375013,
"grad_norm": 3.4374818801879883,
"learning_rate": 7.3630918425556194e-06,
"loss": 0.1921,
"step": 2350
},
{
"epoch": 0.9427743932887247,
"grad_norm": 3.786980390548706,
"learning_rate": 7.3488305761551635e-06,
"loss": 0.2065,
"step": 2360
},
{
"epoch": 0.946769200039948,
"grad_norm": 3.3135437965393066,
"learning_rate": 7.334569309754707e-06,
"loss": 0.2068,
"step": 2370
},
{
"epoch": 0.9507640067911715,
"grad_norm": 3.5033023357391357,
"learning_rate": 7.320308043354251e-06,
"loss": 0.2276,
"step": 2380
},
{
"epoch": 0.9547588135423949,
"grad_norm": 4.119509220123291,
"learning_rate": 7.306046776953795e-06,
"loss": 0.2009,
"step": 2390
},
{
"epoch": 0.9587536202936183,
"grad_norm": 4.391267776489258,
"learning_rate": 7.291785510553337e-06,
"loss": 0.1966,
"step": 2400
},
{
"epoch": 0.9627484270448418,
"grad_norm": 3.9926700592041016,
"learning_rate": 7.2775242441528815e-06,
"loss": 0.2245,
"step": 2410
},
{
"epoch": 0.9667432337960651,
"grad_norm": 4.617944240570068,
"learning_rate": 7.2632629777524256e-06,
"loss": 0.1978,
"step": 2420
},
{
"epoch": 0.9707380405472885,
"grad_norm": 3.33878231048584,
"learning_rate": 7.249001711351969e-06,
"loss": 0.1897,
"step": 2430
},
{
"epoch": 0.974732847298512,
"grad_norm": 3.526387929916382,
"learning_rate": 7.234740444951512e-06,
"loss": 0.1706,
"step": 2440
},
{
"epoch": 0.9787276540497354,
"grad_norm": 3.532831907272339,
"learning_rate": 7.220479178551056e-06,
"loss": 0.2061,
"step": 2450
},
{
"epoch": 0.9827224608009587,
"grad_norm": 4.9069743156433105,
"learning_rate": 7.206217912150599e-06,
"loss": 0.207,
"step": 2460
},
{
"epoch": 0.9867172675521821,
"grad_norm": 3.986372232437134,
"learning_rate": 7.1919566457501435e-06,
"loss": 0.2228,
"step": 2470
},
{
"epoch": 0.9907120743034056,
"grad_norm": 3.50144100189209,
"learning_rate": 7.1776953793496876e-06,
"loss": 0.1928,
"step": 2480
},
{
"epoch": 0.994706881054629,
"grad_norm": 3.42572283744812,
"learning_rate": 7.16343411294923e-06,
"loss": 0.2104,
"step": 2490
},
{
"epoch": 0.9987016878058524,
"grad_norm": 3.9188385009765625,
"learning_rate": 7.149172846548774e-06,
"loss": 0.2023,
"step": 2500
},
{
"epoch": 1.002396884050734,
"grad_norm": 2.447148084640503,
"learning_rate": 7.134911580148318e-06,
"loss": 0.1921,
"step": 2510
},
{
"epoch": 1.0063916908019575,
"grad_norm": 2.6271724700927734,
"learning_rate": 7.120650313747861e-06,
"loss": 0.1441,
"step": 2520
},
{
"epoch": 1.0103864975531809,
"grad_norm": 2.705094337463379,
"learning_rate": 7.1063890473474055e-06,
"loss": 0.1244,
"step": 2530
},
{
"epoch": 1.0143813043044043,
"grad_norm": 2.762216091156006,
"learning_rate": 7.092127780946949e-06,
"loss": 0.1305,
"step": 2540
},
{
"epoch": 1.0183761110556278,
"grad_norm": 3.086855888366699,
"learning_rate": 7.077866514546492e-06,
"loss": 0.1538,
"step": 2550
},
{
"epoch": 1.0223709178068512,
"grad_norm": 2.8569979667663574,
"learning_rate": 7.063605248146036e-06,
"loss": 0.1351,
"step": 2560
},
{
"epoch": 1.0263657245580744,
"grad_norm": 2.759294271469116,
"learning_rate": 7.04934398174558e-06,
"loss": 0.1457,
"step": 2570
},
{
"epoch": 1.0303605313092978,
"grad_norm": 3.2542331218719482,
"learning_rate": 7.035082715345123e-06,
"loss": 0.1191,
"step": 2580
},
{
"epoch": 1.0343553380605213,
"grad_norm": 2.826601028442383,
"learning_rate": 7.020821448944667e-06,
"loss": 0.1291,
"step": 2590
},
{
"epoch": 1.0383501448117447,
"grad_norm": 3.4102251529693604,
"learning_rate": 7.006560182544211e-06,
"loss": 0.1574,
"step": 2600
},
{
"epoch": 1.0423449515629681,
"grad_norm": 3.3488783836364746,
"learning_rate": 6.992298916143754e-06,
"loss": 0.1119,
"step": 2610
},
{
"epoch": 1.0463397583141916,
"grad_norm": 2.898468494415283,
"learning_rate": 6.978037649743298e-06,
"loss": 0.1437,
"step": 2620
},
{
"epoch": 1.050334565065415,
"grad_norm": 2.1492602825164795,
"learning_rate": 6.963776383342841e-06,
"loss": 0.1441,
"step": 2630
},
{
"epoch": 1.0543293718166384,
"grad_norm": 3.253286361694336,
"learning_rate": 6.949515116942385e-06,
"loss": 0.141,
"step": 2640
},
{
"epoch": 1.0583241785678619,
"grad_norm": 4.033700466156006,
"learning_rate": 6.935253850541929e-06,
"loss": 0.1474,
"step": 2650
},
{
"epoch": 1.062318985319085,
"grad_norm": 3.222522258758545,
"learning_rate": 6.920992584141473e-06,
"loss": 0.126,
"step": 2660
},
{
"epoch": 1.0663137920703085,
"grad_norm": 3.157158851623535,
"learning_rate": 6.906731317741015e-06,
"loss": 0.1451,
"step": 2670
},
{
"epoch": 1.070308598821532,
"grad_norm": 3.311572551727295,
"learning_rate": 6.892470051340559e-06,
"loss": 0.1338,
"step": 2680
},
{
"epoch": 1.0743034055727554,
"grad_norm": 2.2077033519744873,
"learning_rate": 6.878208784940103e-06,
"loss": 0.1155,
"step": 2690
},
{
"epoch": 1.0782982123239788,
"grad_norm": 2.602149248123169,
"learning_rate": 6.863947518539647e-06,
"loss": 0.124,
"step": 2700
},
{
"epoch": 1.0822930190752023,
"grad_norm": 3.072699785232544,
"learning_rate": 6.849686252139191e-06,
"loss": 0.1463,
"step": 2710
},
{
"epoch": 1.0862878258264257,
"grad_norm": 2.7928380966186523,
"learning_rate": 6.835424985738735e-06,
"loss": 0.1233,
"step": 2720
},
{
"epoch": 1.0902826325776491,
"grad_norm": 2.895634174346924,
"learning_rate": 6.821163719338277e-06,
"loss": 0.1333,
"step": 2730
},
{
"epoch": 1.0942774393288726,
"grad_norm": 2.9929039478302,
"learning_rate": 6.806902452937821e-06,
"loss": 0.1244,
"step": 2740
},
{
"epoch": 1.098272246080096,
"grad_norm": 2.920032501220703,
"learning_rate": 6.792641186537365e-06,
"loss": 0.1175,
"step": 2750
},
{
"epoch": 1.1022670528313192,
"grad_norm": 3.432638168334961,
"learning_rate": 6.778379920136909e-06,
"loss": 0.1406,
"step": 2760
},
{
"epoch": 1.1062618595825426,
"grad_norm": 2.5651278495788574,
"learning_rate": 6.764118653736452e-06,
"loss": 0.1303,
"step": 2770
},
{
"epoch": 1.110256666333766,
"grad_norm": 3.10604190826416,
"learning_rate": 6.749857387335996e-06,
"loss": 0.1388,
"step": 2780
},
{
"epoch": 1.1142514730849895,
"grad_norm": 3.3140065670013428,
"learning_rate": 6.735596120935539e-06,
"loss": 0.1368,
"step": 2790
},
{
"epoch": 1.118246279836213,
"grad_norm": 2.830920696258545,
"learning_rate": 6.721334854535083e-06,
"loss": 0.1234,
"step": 2800
},
{
"epoch": 1.1222410865874364,
"grad_norm": 3.0485241413116455,
"learning_rate": 6.7070735881346274e-06,
"loss": 0.1248,
"step": 2810
},
{
"epoch": 1.1262358933386598,
"grad_norm": 2.8369975090026855,
"learning_rate": 6.69281232173417e-06,
"loss": 0.1465,
"step": 2820
},
{
"epoch": 1.1302307000898832,
"grad_norm": 2.5955135822296143,
"learning_rate": 6.678551055333714e-06,
"loss": 0.1442,
"step": 2830
},
{
"epoch": 1.1342255068411067,
"grad_norm": 3.3618500232696533,
"learning_rate": 6.664289788933258e-06,
"loss": 0.1343,
"step": 2840
},
{
"epoch": 1.13822031359233,
"grad_norm": 2.4381425380706787,
"learning_rate": 6.650028522532801e-06,
"loss": 0.1302,
"step": 2850
},
{
"epoch": 1.1422151203435533,
"grad_norm": 3.9529569149017334,
"learning_rate": 6.635767256132345e-06,
"loss": 0.1243,
"step": 2860
},
{
"epoch": 1.1462099270947768,
"grad_norm": 2.211047410964966,
"learning_rate": 6.621505989731889e-06,
"loss": 0.1276,
"step": 2870
},
{
"epoch": 1.1502047338460002,
"grad_norm": 3.246532678604126,
"learning_rate": 6.607244723331432e-06,
"loss": 0.1377,
"step": 2880
},
{
"epoch": 1.1541995405972236,
"grad_norm": 2.9653186798095703,
"learning_rate": 6.592983456930976e-06,
"loss": 0.1431,
"step": 2890
},
{
"epoch": 1.158194347348447,
"grad_norm": 3.3144278526306152,
"learning_rate": 6.57872219053052e-06,
"loss": 0.1608,
"step": 2900
},
{
"epoch": 1.1621891540996705,
"grad_norm": 3.038832426071167,
"learning_rate": 6.5644609241300625e-06,
"loss": 0.1503,
"step": 2910
},
{
"epoch": 1.166183960850894,
"grad_norm": 2.9031283855438232,
"learning_rate": 6.5501996577296066e-06,
"loss": 0.1403,
"step": 2920
},
{
"epoch": 1.1701787676021174,
"grad_norm": 2.932734489440918,
"learning_rate": 6.535938391329151e-06,
"loss": 0.1162,
"step": 2930
},
{
"epoch": 1.1741735743533406,
"grad_norm": 3.1002743244171143,
"learning_rate": 6.521677124928694e-06,
"loss": 0.1486,
"step": 2940
},
{
"epoch": 1.178168381104564,
"grad_norm": 2.8298721313476562,
"learning_rate": 6.507415858528238e-06,
"loss": 0.1247,
"step": 2950
},
{
"epoch": 1.1821631878557874,
"grad_norm": 2.830777406692505,
"learning_rate": 6.493154592127781e-06,
"loss": 0.1253,
"step": 2960
},
{
"epoch": 1.1861579946070109,
"grad_norm": 2.3301286697387695,
"learning_rate": 6.478893325727325e-06,
"loss": 0.1302,
"step": 2970
},
{
"epoch": 1.1901528013582343,
"grad_norm": 2.502960205078125,
"learning_rate": 6.464632059326869e-06,
"loss": 0.1185,
"step": 2980
},
{
"epoch": 1.1941476081094577,
"grad_norm": 2.7609617710113525,
"learning_rate": 6.450370792926413e-06,
"loss": 0.126,
"step": 2990
},
{
"epoch": 1.1981424148606812,
"grad_norm": 2.7776389122009277,
"learning_rate": 6.436109526525957e-06,
"loss": 0.1371,
"step": 3000
},
{
"epoch": 1.2021372216119046,
"grad_norm": 2.744546413421631,
"learning_rate": 6.421848260125499e-06,
"loss": 0.1286,
"step": 3010
},
{
"epoch": 1.206132028363128,
"grad_norm": 3.670236825942993,
"learning_rate": 6.407586993725043e-06,
"loss": 0.1434,
"step": 3020
},
{
"epoch": 1.2101268351143513,
"grad_norm": 3.0598738193511963,
"learning_rate": 6.393325727324587e-06,
"loss": 0.1251,
"step": 3030
},
{
"epoch": 1.2141216418655747,
"grad_norm": 3.536545991897583,
"learning_rate": 6.379064460924131e-06,
"loss": 0.1399,
"step": 3040
},
{
"epoch": 1.2181164486167981,
"grad_norm": 4.090343952178955,
"learning_rate": 6.364803194523675e-06,
"loss": 0.1322,
"step": 3050
},
{
"epoch": 1.2221112553680216,
"grad_norm": 2.5921967029571533,
"learning_rate": 6.350541928123218e-06,
"loss": 0.1233,
"step": 3060
},
{
"epoch": 1.226106062119245,
"grad_norm": 2.831301689147949,
"learning_rate": 6.336280661722761e-06,
"loss": 0.1301,
"step": 3070
},
{
"epoch": 1.2301008688704684,
"grad_norm": 2.6285064220428467,
"learning_rate": 6.322019395322305e-06,
"loss": 0.1357,
"step": 3080
},
{
"epoch": 1.2340956756216919,
"grad_norm": 3.501486301422119,
"learning_rate": 6.307758128921849e-06,
"loss": 0.1387,
"step": 3090
},
{
"epoch": 1.2380904823729153,
"grad_norm": 3.47078800201416,
"learning_rate": 6.293496862521392e-06,
"loss": 0.1312,
"step": 3100
},
{
"epoch": 1.2420852891241387,
"grad_norm": 3.3166306018829346,
"learning_rate": 6.279235596120936e-06,
"loss": 0.1103,
"step": 3110
},
{
"epoch": 1.246080095875362,
"grad_norm": 2.7375359535217285,
"learning_rate": 6.26497432972048e-06,
"loss": 0.1456,
"step": 3120
},
{
"epoch": 1.2500749026265854,
"grad_norm": 2.8451974391937256,
"learning_rate": 6.250713063320023e-06,
"loss": 0.1323,
"step": 3130
},
{
"epoch": 1.2540697093778088,
"grad_norm": 3.141065835952759,
"learning_rate": 6.236451796919567e-06,
"loss": 0.1307,
"step": 3140
},
{
"epoch": 1.2580645161290323,
"grad_norm": 2.7583253383636475,
"learning_rate": 6.222190530519111e-06,
"loss": 0.1218,
"step": 3150
},
{
"epoch": 1.2620593228802557,
"grad_norm": 2.8716115951538086,
"learning_rate": 6.207929264118654e-06,
"loss": 0.1211,
"step": 3160
},
{
"epoch": 1.2660541296314791,
"grad_norm": 3.0804483890533447,
"learning_rate": 6.193667997718198e-06,
"loss": 0.1448,
"step": 3170
},
{
"epoch": 1.2700489363827026,
"grad_norm": 2.831940174102783,
"learning_rate": 6.179406731317742e-06,
"loss": 0.1195,
"step": 3180
},
{
"epoch": 1.2740437431339258,
"grad_norm": 3.2198126316070557,
"learning_rate": 6.165145464917285e-06,
"loss": 0.1337,
"step": 3190
},
{
"epoch": 1.2780385498851494,
"grad_norm": 3.0133731365203857,
"learning_rate": 6.1508841985168285e-06,
"loss": 0.1368,
"step": 3200
},
{
"epoch": 1.2820333566363726,
"grad_norm": 2.603764057159424,
"learning_rate": 6.136622932116373e-06,
"loss": 0.1169,
"step": 3210
},
{
"epoch": 1.286028163387596,
"grad_norm": 2.296199083328247,
"learning_rate": 6.122361665715916e-06,
"loss": 0.1143,
"step": 3220
},
{
"epoch": 1.2900229701388195,
"grad_norm": 2.61836838722229,
"learning_rate": 6.10810039931546e-06,
"loss": 0.119,
"step": 3230
},
{
"epoch": 1.294017776890043,
"grad_norm": 3.154714584350586,
"learning_rate": 6.093839132915004e-06,
"loss": 0.1283,
"step": 3240
},
{
"epoch": 1.2980125836412664,
"grad_norm": 3.181041717529297,
"learning_rate": 6.0795778665145464e-06,
"loss": 0.1149,
"step": 3250
},
{
"epoch": 1.3020073903924898,
"grad_norm": 2.668581008911133,
"learning_rate": 6.0653166001140905e-06,
"loss": 0.1225,
"step": 3260
},
{
"epoch": 1.3060021971437132,
"grad_norm": 3.9531939029693604,
"learning_rate": 6.051055333713635e-06,
"loss": 0.1317,
"step": 3270
},
{
"epoch": 1.3099970038949365,
"grad_norm": 3.74332594871521,
"learning_rate": 6.036794067313178e-06,
"loss": 0.1332,
"step": 3280
},
{
"epoch": 1.31399181064616,
"grad_norm": 2.8825340270996094,
"learning_rate": 6.022532800912721e-06,
"loss": 0.1334,
"step": 3290
},
{
"epoch": 1.3179866173973833,
"grad_norm": 2.882206916809082,
"learning_rate": 6.008271534512265e-06,
"loss": 0.1192,
"step": 3300
},
{
"epoch": 1.3219814241486068,
"grad_norm": 3.6865313053131104,
"learning_rate": 5.9940102681118085e-06,
"loss": 0.1203,
"step": 3310
},
{
"epoch": 1.3259762308998302,
"grad_norm": 3.1911978721618652,
"learning_rate": 5.9797490017113525e-06,
"loss": 0.1218,
"step": 3320
},
{
"epoch": 1.3299710376510536,
"grad_norm": 3.1022403240203857,
"learning_rate": 5.965487735310897e-06,
"loss": 0.1206,
"step": 3330
},
{
"epoch": 1.333965844402277,
"grad_norm": 2.3271052837371826,
"learning_rate": 5.951226468910439e-06,
"loss": 0.1333,
"step": 3340
},
{
"epoch": 1.3379606511535005,
"grad_norm": 2.547220230102539,
"learning_rate": 5.936965202509983e-06,
"loss": 0.1356,
"step": 3350
},
{
"epoch": 1.341955457904724,
"grad_norm": 3.0954415798187256,
"learning_rate": 5.922703936109527e-06,
"loss": 0.1094,
"step": 3360
},
{
"epoch": 1.3459502646559471,
"grad_norm": 2.3438808917999268,
"learning_rate": 5.9084426697090705e-06,
"loss": 0.1099,
"step": 3370
},
{
"epoch": 1.3499450714071708,
"grad_norm": 3.047748565673828,
"learning_rate": 5.8941814033086146e-06,
"loss": 0.1308,
"step": 3380
},
{
"epoch": 1.353939878158394,
"grad_norm": 2.9918041229248047,
"learning_rate": 5.879920136908158e-06,
"loss": 0.1325,
"step": 3390
},
{
"epoch": 1.3579346849096174,
"grad_norm": 3.4993977546691895,
"learning_rate": 5.865658870507701e-06,
"loss": 0.1454,
"step": 3400
},
{
"epoch": 1.3619294916608409,
"grad_norm": 4.395610809326172,
"learning_rate": 5.851397604107245e-06,
"loss": 0.1438,
"step": 3410
},
{
"epoch": 1.3659242984120643,
"grad_norm": 2.523089647293091,
"learning_rate": 5.837136337706789e-06,
"loss": 0.1202,
"step": 3420
},
{
"epoch": 1.3699191051632877,
"grad_norm": 2.9900269508361816,
"learning_rate": 5.822875071306332e-06,
"loss": 0.1404,
"step": 3430
},
{
"epoch": 1.3739139119145112,
"grad_norm": 2.8332931995391846,
"learning_rate": 5.808613804905876e-06,
"loss": 0.1307,
"step": 3440
},
{
"epoch": 1.3779087186657346,
"grad_norm": 3.6861443519592285,
"learning_rate": 5.79435253850542e-06,
"loss": 0.1308,
"step": 3450
},
{
"epoch": 1.3819035254169578,
"grad_norm": 3.442070722579956,
"learning_rate": 5.780091272104963e-06,
"loss": 0.1174,
"step": 3460
},
{
"epoch": 1.3858983321681815,
"grad_norm": 3.1186418533325195,
"learning_rate": 5.765830005704507e-06,
"loss": 0.1086,
"step": 3470
},
{
"epoch": 1.3898931389194047,
"grad_norm": 3.1245079040527344,
"learning_rate": 5.751568739304051e-06,
"loss": 0.1168,
"step": 3480
},
{
"epoch": 1.3938879456706281,
"grad_norm": 2.674656629562378,
"learning_rate": 5.737307472903594e-06,
"loss": 0.127,
"step": 3490
},
{
"epoch": 1.3978827524218516,
"grad_norm": 2.8337886333465576,
"learning_rate": 5.723046206503138e-06,
"loss": 0.1585,
"step": 3500
},
{
"epoch": 1.401877559173075,
"grad_norm": 3.2349750995635986,
"learning_rate": 5.708784940102682e-06,
"loss": 0.1329,
"step": 3510
},
{
"epoch": 1.4058723659242984,
"grad_norm": 3.044790744781494,
"learning_rate": 5.694523673702225e-06,
"loss": 0.1277,
"step": 3520
},
{
"epoch": 1.4098671726755219,
"grad_norm": 3.2932238578796387,
"learning_rate": 5.680262407301768e-06,
"loss": 0.1227,
"step": 3530
},
{
"epoch": 1.4138619794267453,
"grad_norm": 3.259983777999878,
"learning_rate": 5.6660011409013125e-06,
"loss": 0.1201,
"step": 3540
},
{
"epoch": 1.4178567861779685,
"grad_norm": 3.202609062194824,
"learning_rate": 5.651739874500856e-06,
"loss": 0.1436,
"step": 3550
},
{
"epoch": 1.4218515929291922,
"grad_norm": 2.5954806804656982,
"learning_rate": 5.6374786081004e-06,
"loss": 0.1204,
"step": 3560
},
{
"epoch": 1.4258463996804154,
"grad_norm": 2.437851667404175,
"learning_rate": 5.623217341699944e-06,
"loss": 0.1307,
"step": 3570
},
{
"epoch": 1.4298412064316388,
"grad_norm": 2.9584476947784424,
"learning_rate": 5.608956075299488e-06,
"loss": 0.1232,
"step": 3580
},
{
"epoch": 1.4338360131828622,
"grad_norm": 2.2511274814605713,
"learning_rate": 5.59469480889903e-06,
"loss": 0.1233,
"step": 3590
},
{
"epoch": 1.4378308199340857,
"grad_norm": 3.3061892986297607,
"learning_rate": 5.5804335424985745e-06,
"loss": 0.1174,
"step": 3600
},
{
"epoch": 1.4418256266853091,
"grad_norm": 2.1911284923553467,
"learning_rate": 5.566172276098119e-06,
"loss": 0.1263,
"step": 3610
},
{
"epoch": 1.4458204334365325,
"grad_norm": 3.9662625789642334,
"learning_rate": 5.551911009697661e-06,
"loss": 0.1367,
"step": 3620
},
{
"epoch": 1.449815240187756,
"grad_norm": 2.914937973022461,
"learning_rate": 5.537649743297205e-06,
"loss": 0.1349,
"step": 3630
},
{
"epoch": 1.4538100469389792,
"grad_norm": 3.0488805770874023,
"learning_rate": 5.523388476896749e-06,
"loss": 0.1309,
"step": 3640
},
{
"epoch": 1.4578048536902029,
"grad_norm": 3.5103049278259277,
"learning_rate": 5.509127210496292e-06,
"loss": 0.1419,
"step": 3650
},
{
"epoch": 1.461799660441426,
"grad_norm": 2.7047817707061768,
"learning_rate": 5.4948659440958365e-06,
"loss": 0.1254,
"step": 3660
},
{
"epoch": 1.4657944671926495,
"grad_norm": 2.7503600120544434,
"learning_rate": 5.480604677695381e-06,
"loss": 0.121,
"step": 3670
},
{
"epoch": 1.469789273943873,
"grad_norm": 3.51440167427063,
"learning_rate": 5.466343411294923e-06,
"loss": 0.1132,
"step": 3680
},
{
"epoch": 1.4737840806950964,
"grad_norm": 2.5173933506011963,
"learning_rate": 5.452082144894467e-06,
"loss": 0.1137,
"step": 3690
},
{
"epoch": 1.4777788874463198,
"grad_norm": 2.483872175216675,
"learning_rate": 5.437820878494011e-06,
"loss": 0.1283,
"step": 3700
},
{
"epoch": 1.4817736941975432,
"grad_norm": 2.768322229385376,
"learning_rate": 5.4235596120935544e-06,
"loss": 0.143,
"step": 3710
},
{
"epoch": 1.4857685009487667,
"grad_norm": 3.048396587371826,
"learning_rate": 5.409298345693098e-06,
"loss": 0.1258,
"step": 3720
},
{
"epoch": 1.4897633076999899,
"grad_norm": 2.805190324783325,
"learning_rate": 5.395037079292642e-06,
"loss": 0.1344,
"step": 3730
},
{
"epoch": 1.4937581144512135,
"grad_norm": 3.2144975662231445,
"learning_rate": 5.380775812892185e-06,
"loss": 0.1216,
"step": 3740
},
{
"epoch": 1.4977529212024367,
"grad_norm": 3.064366340637207,
"learning_rate": 5.366514546491729e-06,
"loss": 0.1255,
"step": 3750
},
{
"epoch": 1.5017477279536604,
"grad_norm": 2.593935966491699,
"learning_rate": 5.352253280091273e-06,
"loss": 0.1128,
"step": 3760
},
{
"epoch": 1.5057425347048836,
"grad_norm": 3.0325539112091064,
"learning_rate": 5.337992013690816e-06,
"loss": 0.128,
"step": 3770
},
{
"epoch": 1.509737341456107,
"grad_norm": 2.8857476711273193,
"learning_rate": 5.32373074729036e-06,
"loss": 0.1269,
"step": 3780
},
{
"epoch": 1.5137321482073305,
"grad_norm": 2.5328145027160645,
"learning_rate": 5.309469480889904e-06,
"loss": 0.1273,
"step": 3790
},
{
"epoch": 1.517726954958554,
"grad_norm": 2.4243452548980713,
"learning_rate": 5.295208214489447e-06,
"loss": 0.1101,
"step": 3800
},
{
"epoch": 1.5217217617097774,
"grad_norm": 2.239300012588501,
"learning_rate": 5.280946948088991e-06,
"loss": 0.1174,
"step": 3810
},
{
"epoch": 1.5257165684610006,
"grad_norm": 2.756876230239868,
"learning_rate": 5.266685681688534e-06,
"loss": 0.1059,
"step": 3820
},
{
"epoch": 1.5297113752122242,
"grad_norm": 1.9072672128677368,
"learning_rate": 5.252424415288078e-06,
"loss": 0.1061,
"step": 3830
},
{
"epoch": 1.5337061819634474,
"grad_norm": 3.0514819622039795,
"learning_rate": 5.238163148887622e-06,
"loss": 0.1186,
"step": 3840
},
{
"epoch": 1.537700988714671,
"grad_norm": 2.7836649417877197,
"learning_rate": 5.223901882487166e-06,
"loss": 0.1114,
"step": 3850
},
{
"epoch": 1.5416957954658943,
"grad_norm": 2.7533199787139893,
"learning_rate": 5.209640616086708e-06,
"loss": 0.1168,
"step": 3860
},
{
"epoch": 1.5456906022171177,
"grad_norm": 3.529172897338867,
"learning_rate": 5.195379349686252e-06,
"loss": 0.1409,
"step": 3870
},
{
"epoch": 1.5496854089683412,
"grad_norm": 3.022292137145996,
"learning_rate": 5.1811180832857964e-06,
"loss": 0.107,
"step": 3880
},
{
"epoch": 1.5536802157195646,
"grad_norm": 2.6646084785461426,
"learning_rate": 5.16685681688534e-06,
"loss": 0.1202,
"step": 3890
},
{
"epoch": 1.557675022470788,
"grad_norm": 2.9116556644439697,
"learning_rate": 5.152595550484884e-06,
"loss": 0.1289,
"step": 3900
},
{
"epoch": 1.5616698292220113,
"grad_norm": 2.6292238235473633,
"learning_rate": 5.138334284084428e-06,
"loss": 0.1191,
"step": 3910
},
{
"epoch": 1.565664635973235,
"grad_norm": 3.42012619972229,
"learning_rate": 5.12407301768397e-06,
"loss": 0.1219,
"step": 3920
},
{
"epoch": 1.5696594427244581,
"grad_norm": 3.27042555809021,
"learning_rate": 5.109811751283514e-06,
"loss": 0.1269,
"step": 3930
},
{
"epoch": 1.5736542494756818,
"grad_norm": 3.3910858631134033,
"learning_rate": 5.0955504848830584e-06,
"loss": 0.1098,
"step": 3940
},
{
"epoch": 1.577649056226905,
"grad_norm": 1.9884843826293945,
"learning_rate": 5.081289218482601e-06,
"loss": 0.1085,
"step": 3950
},
{
"epoch": 1.5816438629781284,
"grad_norm": 2.9106059074401855,
"learning_rate": 5.067027952082145e-06,
"loss": 0.1173,
"step": 3960
},
{
"epoch": 1.5856386697293519,
"grad_norm": 2.5543880462646484,
"learning_rate": 5.052766685681689e-06,
"loss": 0.1182,
"step": 3970
},
{
"epoch": 1.5896334764805753,
"grad_norm": 3.7071778774261475,
"learning_rate": 5.038505419281232e-06,
"loss": 0.1245,
"step": 3980
},
{
"epoch": 1.5936282832317987,
"grad_norm": 2.9774022102355957,
"learning_rate": 5.024244152880776e-06,
"loss": 0.1157,
"step": 3990
},
{
"epoch": 1.597623089983022,
"grad_norm": 2.8045384883880615,
"learning_rate": 5.0099828864803205e-06,
"loss": 0.11,
"step": 4000
},
{
"epoch": 1.597623089983022,
"eval_loss": 0.10785084217786789,
"eval_runtime": 8524.6921,
"eval_samples_per_second": 2.349,
"eval_steps_per_second": 0.294,
"eval_wer": 13.268986926675701,
"step": 4000
}
],
"logging_steps": 10,
"max_steps": 7512,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.3060885122514944e+20,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}