{
"best_metric": 0.8973993679198728,
"best_model_checkpoint": "./nlu_finetuned_models/qqp/roberta-base_lr1e-05/checkpoint-204670",
"epoch": 10.0,
"eval_steps": 500,
"global_step": 204670,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.024429569550984513,
"grad_norm": 10.54019546508789,
"learning_rate": 4.0713296962788053e-07,
"loss": 0.6758,
"step": 500
},
{
"epoch": 0.048859139101969026,
"grad_norm": 7.6315178871154785,
"learning_rate": 8.142659392557611e-07,
"loss": 0.6406,
"step": 1000
},
{
"epoch": 0.07328870865295353,
"grad_norm": 25.296419143676758,
"learning_rate": 1.2213989088836414e-06,
"loss": 0.528,
"step": 1500
},
{
"epoch": 0.09771827820393805,
"grad_norm": 134.22874450683594,
"learning_rate": 1.6285318785115221e-06,
"loss": 0.4561,
"step": 2000
},
{
"epoch": 0.12214784775492256,
"grad_norm": 34.93831253051758,
"learning_rate": 2.0356648481394024e-06,
"loss": 0.4151,
"step": 2500
},
{
"epoch": 0.14657741730590707,
"grad_norm": 15.966097831726074,
"learning_rate": 2.442797817767283e-06,
"loss": 0.3976,
"step": 3000
},
{
"epoch": 0.17100698685689159,
"grad_norm": 19.514278411865234,
"learning_rate": 2.8499307873951637e-06,
"loss": 0.377,
"step": 3500
},
{
"epoch": 0.1954365564078761,
"grad_norm": 15.483463287353516,
"learning_rate": 3.2570637570230442e-06,
"loss": 0.3688,
"step": 4000
},
{
"epoch": 0.2198661259588606,
"grad_norm": 17.194320678710938,
"learning_rate": 3.6641967266509243e-06,
"loss": 0.373,
"step": 4500
},
{
"epoch": 0.24429569550984512,
"grad_norm": 7.317613124847412,
"learning_rate": 4.071329696278805e-06,
"loss": 0.3518,
"step": 5000
},
{
"epoch": 0.2687252650608296,
"grad_norm": 15.082125663757324,
"learning_rate": 4.478462665906685e-06,
"loss": 0.3587,
"step": 5500
},
{
"epoch": 0.29315483461181413,
"grad_norm": 49.410526275634766,
"learning_rate": 4.885595635534566e-06,
"loss": 0.3513,
"step": 6000
},
{
"epoch": 0.31758440416279865,
"grad_norm": 52.64762878417969,
"learning_rate": 5.292728605162446e-06,
"loss": 0.3426,
"step": 6500
},
{
"epoch": 0.34201397371378317,
"grad_norm": 21.950075149536133,
"learning_rate": 5.6998615747903275e-06,
"loss": 0.3401,
"step": 7000
},
{
"epoch": 0.3664435432647677,
"grad_norm": 14.155437469482422,
"learning_rate": 6.106994544418208e-06,
"loss": 0.3343,
"step": 7500
},
{
"epoch": 0.3908731128157522,
"grad_norm": 12.090170860290527,
"learning_rate": 6.5141275140460884e-06,
"loss": 0.3163,
"step": 8000
},
{
"epoch": 0.4153026823667367,
"grad_norm": 17.292564392089844,
"learning_rate": 6.921260483673968e-06,
"loss": 0.33,
"step": 8500
},
{
"epoch": 0.4397322519177212,
"grad_norm": 29.74077033996582,
"learning_rate": 7.3283934533018485e-06,
"loss": 0.3326,
"step": 9000
},
{
"epoch": 0.4641618214687057,
"grad_norm": 16.858999252319336,
"learning_rate": 7.73552642292973e-06,
"loss": 0.3229,
"step": 9500
},
{
"epoch": 0.48859139101969024,
"grad_norm": 16.417327880859375,
"learning_rate": 8.14265939255761e-06,
"loss": 0.3207,
"step": 10000
},
{
"epoch": 0.5130209605706747,
"grad_norm": 18.313976287841797,
"learning_rate": 8.54979236218549e-06,
"loss": 0.3154,
"step": 10500
},
{
"epoch": 0.5374505301216592,
"grad_norm": 26.479129791259766,
"learning_rate": 8.95692533181337e-06,
"loss": 0.3199,
"step": 11000
},
{
"epoch": 0.5618800996726437,
"grad_norm": 11.478958129882812,
"learning_rate": 9.364058301441251e-06,
"loss": 0.3041,
"step": 11500
},
{
"epoch": 0.5863096692236283,
"grad_norm": 18.56060028076172,
"learning_rate": 9.771191271069131e-06,
"loss": 0.3027,
"step": 12000
},
{
"epoch": 0.6107392387746128,
"grad_norm": 7.155969142913818,
"learning_rate": 9.988616812811544e-06,
"loss": 0.3122,
"step": 12500
},
{
"epoch": 0.6351688083255973,
"grad_norm": 8.636488914489746,
"learning_rate": 9.962627800965753e-06,
"loss": 0.3218,
"step": 13000
},
{
"epoch": 0.6595983778765818,
"grad_norm": 23.951885223388672,
"learning_rate": 9.936638789119961e-06,
"loss": 0.3109,
"step": 13500
},
{
"epoch": 0.6840279474275663,
"grad_norm": 12.705269813537598,
"learning_rate": 9.91064977727417e-06,
"loss": 0.3054,
"step": 14000
},
{
"epoch": 0.7084575169785509,
"grad_norm": 19.542213439941406,
"learning_rate": 9.884660765428378e-06,
"loss": 0.2983,
"step": 14500
},
{
"epoch": 0.7328870865295354,
"grad_norm": 18.7448673248291,
"learning_rate": 9.858671753582586e-06,
"loss": 0.2882,
"step": 15000
},
{
"epoch": 0.7573166560805199,
"grad_norm": 12.782718658447266,
"learning_rate": 9.832682741736795e-06,
"loss": 0.2956,
"step": 15500
},
{
"epoch": 0.7817462256315044,
"grad_norm": 24.18721580505371,
"learning_rate": 9.806693729891003e-06,
"loss": 0.2958,
"step": 16000
},
{
"epoch": 0.8061757951824888,
"grad_norm": 34.30314636230469,
"learning_rate": 9.78070471804521e-06,
"loss": 0.2972,
"step": 16500
},
{
"epoch": 0.8306053647334734,
"grad_norm": 52.42458724975586,
"learning_rate": 9.75471570619942e-06,
"loss": 0.2831,
"step": 17000
},
{
"epoch": 0.8550349342844579,
"grad_norm": 15.497232437133789,
"learning_rate": 9.728726694353629e-06,
"loss": 0.2976,
"step": 17500
},
{
"epoch": 0.8794645038354424,
"grad_norm": 30.495269775390625,
"learning_rate": 9.702737682507836e-06,
"loss": 0.2977,
"step": 18000
},
{
"epoch": 0.9038940733864269,
"grad_norm": 2.9801318645477295,
"learning_rate": 9.676748670662046e-06,
"loss": 0.2834,
"step": 18500
},
{
"epoch": 0.9283236429374114,
"grad_norm": 12.599749565124512,
"learning_rate": 9.650759658816253e-06,
"loss": 0.2919,
"step": 19000
},
{
"epoch": 0.952753212488396,
"grad_norm": 8.34764575958252,
"learning_rate": 9.624770646970461e-06,
"loss": 0.28,
"step": 19500
},
{
"epoch": 0.9771827820393805,
"grad_norm": 16.70107650756836,
"learning_rate": 9.59878163512467e-06,
"loss": 0.2802,
"step": 20000
},
{
"epoch": 1.0,
"eval_accuracy": 0.8791809811735606,
"eval_averaged_scores": 0.8627373437336334,
"eval_f1": 0.8462937062937063,
"eval_loss": 0.2766279876232147,
"eval_runtime": 20.1271,
"eval_samples_per_second": 1807.757,
"eval_steps_per_second": 113.031,
"step": 20467
},
{
"epoch": 1.001612351590365,
"grad_norm": 2.792717218399048,
"learning_rate": 9.572792623278878e-06,
"loss": 0.2782,
"step": 20500
},
{
"epoch": 1.0260419211413494,
"grad_norm": 8.300065040588379,
"learning_rate": 9.546803611433086e-06,
"loss": 0.2515,
"step": 21000
},
{
"epoch": 1.050471490692334,
"grad_norm": 29.82993507385254,
"learning_rate": 9.520814599587295e-06,
"loss": 0.2532,
"step": 21500
},
{
"epoch": 1.0749010602433184,
"grad_norm": 51.71006393432617,
"learning_rate": 9.494825587741504e-06,
"loss": 0.2411,
"step": 22000
},
{
"epoch": 1.099330629794303,
"grad_norm": 12.74963092803955,
"learning_rate": 9.468836575895712e-06,
"loss": 0.2545,
"step": 22500
},
{
"epoch": 1.1237601993452875,
"grad_norm": 11.051137924194336,
"learning_rate": 9.442847564049921e-06,
"loss": 0.2597,
"step": 23000
},
{
"epoch": 1.1481897688962721,
"grad_norm": 16.989593505859375,
"learning_rate": 9.416858552204129e-06,
"loss": 0.2508,
"step": 23500
},
{
"epoch": 1.1726193384472565,
"grad_norm": 18.592727661132812,
"learning_rate": 9.390869540358337e-06,
"loss": 0.2518,
"step": 24000
},
{
"epoch": 1.1970489079982412,
"grad_norm": 14.818976402282715,
"learning_rate": 9.364880528512546e-06,
"loss": 0.2497,
"step": 24500
},
{
"epoch": 1.2214784775492256,
"grad_norm": 11.762436866760254,
"learning_rate": 9.338891516666754e-06,
"loss": 0.2477,
"step": 25000
},
{
"epoch": 1.24590804710021,
"grad_norm": 3.5736145973205566,
"learning_rate": 9.312902504820961e-06,
"loss": 0.2557,
"step": 25500
},
{
"epoch": 1.2703376166511946,
"grad_norm": 17.63505744934082,
"learning_rate": 9.28691349297517e-06,
"loss": 0.251,
"step": 26000
},
{
"epoch": 1.2947671862021792,
"grad_norm": 13.431313514709473,
"learning_rate": 9.26092448112938e-06,
"loss": 0.2541,
"step": 26500
},
{
"epoch": 1.3191967557531636,
"grad_norm": 29.016206741333008,
"learning_rate": 9.234935469283588e-06,
"loss": 0.2417,
"step": 27000
},
{
"epoch": 1.343626325304148,
"grad_norm": 21.277286529541016,
"learning_rate": 9.208946457437797e-06,
"loss": 0.2466,
"step": 27500
},
{
"epoch": 1.3680558948551327,
"grad_norm": 12.927133560180664,
"learning_rate": 9.182957445592005e-06,
"loss": 0.2484,
"step": 28000
},
{
"epoch": 1.392485464406117,
"grad_norm": 14.714786529541016,
"learning_rate": 9.156968433746212e-06,
"loss": 0.2543,
"step": 28500
},
{
"epoch": 1.4169150339571017,
"grad_norm": 4.885636806488037,
"learning_rate": 9.130979421900422e-06,
"loss": 0.2554,
"step": 29000
},
{
"epoch": 1.4413446035080861,
"grad_norm": 13.911579132080078,
"learning_rate": 9.10499041005463e-06,
"loss": 0.2504,
"step": 29500
},
{
"epoch": 1.4657741730590708,
"grad_norm": 8.80788803100586,
"learning_rate": 9.079001398208837e-06,
"loss": 0.2379,
"step": 30000
},
{
"epoch": 1.4902037426100552,
"grad_norm": 14.809807777404785,
"learning_rate": 9.053012386363046e-06,
"loss": 0.2462,
"step": 30500
},
{
"epoch": 1.5146333121610396,
"grad_norm": 2.741872549057007,
"learning_rate": 9.027023374517256e-06,
"loss": 0.248,
"step": 31000
},
{
"epoch": 1.5390628817120242,
"grad_norm": 15.03188419342041,
"learning_rate": 9.001034362671463e-06,
"loss": 0.2523,
"step": 31500
},
{
"epoch": 1.5634924512630088,
"grad_norm": 13.394209861755371,
"learning_rate": 8.975045350825672e-06,
"loss": 0.2445,
"step": 32000
},
{
"epoch": 1.5879220208139933,
"grad_norm": 17.654428482055664,
"learning_rate": 8.94905633897988e-06,
"loss": 0.2419,
"step": 32500
},
{
"epoch": 1.6123515903649777,
"grad_norm": 2.1160449981689453,
"learning_rate": 8.923067327134088e-06,
"loss": 0.2423,
"step": 33000
},
{
"epoch": 1.6367811599159623,
"grad_norm": 9.247222900390625,
"learning_rate": 8.897078315288297e-06,
"loss": 0.2371,
"step": 33500
},
{
"epoch": 1.661210729466947,
"grad_norm": 12.957727432250977,
"learning_rate": 8.871089303442505e-06,
"loss": 0.245,
"step": 34000
},
{
"epoch": 1.6856402990179313,
"grad_norm": 18.7705020904541,
"learning_rate": 8.845100291596712e-06,
"loss": 0.2409,
"step": 34500
},
{
"epoch": 1.7100698685689157,
"grad_norm": 17.920528411865234,
"learning_rate": 8.819111279750922e-06,
"loss": 0.2369,
"step": 35000
},
{
"epoch": 1.7344994381199004,
"grad_norm": 6.508107662200928,
"learning_rate": 8.793122267905131e-06,
"loss": 0.2445,
"step": 35500
},
{
"epoch": 1.758929007670885,
"grad_norm": 15.203210830688477,
"learning_rate": 8.767133256059339e-06,
"loss": 0.2441,
"step": 36000
},
{
"epoch": 1.7833585772218692,
"grad_norm": 3.705578327178955,
"learning_rate": 8.741144244213548e-06,
"loss": 0.2359,
"step": 36500
},
{
"epoch": 1.8077881467728538,
"grad_norm": 26.21458625793457,
"learning_rate": 8.715155232367756e-06,
"loss": 0.2339,
"step": 37000
},
{
"epoch": 1.8322177163238385,
"grad_norm": 3.0099897384643555,
"learning_rate": 8.689166220521963e-06,
"loss": 0.2376,
"step": 37500
},
{
"epoch": 1.8566472858748229,
"grad_norm": 17.3660888671875,
"learning_rate": 8.663177208676173e-06,
"loss": 0.2415,
"step": 38000
},
{
"epoch": 1.8810768554258073,
"grad_norm": 23.040613174438477,
"learning_rate": 8.63718819683038e-06,
"loss": 0.236,
"step": 38500
},
{
"epoch": 1.905506424976792,
"grad_norm": 7.123594760894775,
"learning_rate": 8.61119918498459e-06,
"loss": 0.2372,
"step": 39000
},
{
"epoch": 1.9299359945277765,
"grad_norm": 34.456966400146484,
"learning_rate": 8.585210173138797e-06,
"loss": 0.2391,
"step": 39500
},
{
"epoch": 1.954365564078761,
"grad_norm": 23.899415969848633,
"learning_rate": 8.559221161293007e-06,
"loss": 0.2353,
"step": 40000
},
{
"epoch": 1.9787951336297454,
"grad_norm": 19.077669143676758,
"learning_rate": 8.533232149447214e-06,
"loss": 0.2369,
"step": 40500
},
{
"epoch": 2.0,
"eval_accuracy": 0.8980623883468463,
"eval_averaged_scores": 0.8814449118745056,
"eval_f1": 0.8648274354021648,
"eval_loss": 0.28552600741386414,
"eval_runtime": 20.2684,
"eval_samples_per_second": 1795.163,
"eval_steps_per_second": 112.244,
"step": 40934
},
{
"epoch": 2.00322470318073,
"grad_norm": 20.173015594482422,
"learning_rate": 8.507243137601424e-06,
"loss": 0.2315,
"step": 41000
},
{
"epoch": 2.0276542727317146,
"grad_norm": 39.991127014160156,
"learning_rate": 8.481254125755631e-06,
"loss": 0.1966,
"step": 41500
},
{
"epoch": 2.052083842282699,
"grad_norm": 30.70036506652832,
"learning_rate": 8.455265113909839e-06,
"loss": 0.2143,
"step": 42000
},
{
"epoch": 2.0765134118336834,
"grad_norm": 17.682048797607422,
"learning_rate": 8.429276102064048e-06,
"loss": 0.2001,
"step": 42500
},
{
"epoch": 2.100942981384668,
"grad_norm": 20.595016479492188,
"learning_rate": 8.403287090218256e-06,
"loss": 0.2088,
"step": 43000
},
{
"epoch": 2.1253725509356527,
"grad_norm": 4.142117977142334,
"learning_rate": 8.377298078372465e-06,
"loss": 0.2159,
"step": 43500
},
{
"epoch": 2.149802120486637,
"grad_norm": 1.1202857494354248,
"learning_rate": 8.351309066526673e-06,
"loss": 0.2031,
"step": 44000
},
{
"epoch": 2.1742316900376215,
"grad_norm": 26.033584594726562,
"learning_rate": 8.325320054680882e-06,
"loss": 0.216,
"step": 44500
},
{
"epoch": 2.198661259588606,
"grad_norm": 5.992620468139648,
"learning_rate": 8.29933104283509e-06,
"loss": 0.2099,
"step": 45000
},
{
"epoch": 2.2230908291395908,
"grad_norm": 1.6579467058181763,
"learning_rate": 8.2733420309893e-06,
"loss": 0.2048,
"step": 45500
},
{
"epoch": 2.247520398690575,
"grad_norm": 30.13793182373047,
"learning_rate": 8.247353019143507e-06,
"loss": 0.2148,
"step": 46000
},
{
"epoch": 2.2719499682415596,
"grad_norm": 15.029723167419434,
"learning_rate": 8.221364007297714e-06,
"loss": 0.199,
"step": 46500
},
{
"epoch": 2.2963795377925442,
"grad_norm": 7.096563816070557,
"learning_rate": 8.195374995451924e-06,
"loss": 0.2133,
"step": 47000
},
{
"epoch": 2.3208091073435284,
"grad_norm": 21.75437355041504,
"learning_rate": 8.169385983606131e-06,
"loss": 0.2149,
"step": 47500
},
{
"epoch": 2.345238676894513,
"grad_norm": 17.925037384033203,
"learning_rate": 8.14339697176034e-06,
"loss": 0.2006,
"step": 48000
},
{
"epoch": 2.3696682464454977,
"grad_norm": 3.024029493331909,
"learning_rate": 8.117407959914548e-06,
"loss": 0.2001,
"step": 48500
},
{
"epoch": 2.3940978159964823,
"grad_norm": 17.959518432617188,
"learning_rate": 8.091418948068758e-06,
"loss": 0.2068,
"step": 49000
},
{
"epoch": 2.4185273855474665,
"grad_norm": 12.003142356872559,
"learning_rate": 8.065429936222965e-06,
"loss": 0.2113,
"step": 49500
},
{
"epoch": 2.442956955098451,
"grad_norm": 18.196792602539062,
"learning_rate": 8.039440924377175e-06,
"loss": 0.2031,
"step": 50000
},
{
"epoch": 2.4673865246494358,
"grad_norm": 19.81887435913086,
"learning_rate": 8.013451912531382e-06,
"loss": 0.2156,
"step": 50500
},
{
"epoch": 2.49181609420042,
"grad_norm": 20.83788299560547,
"learning_rate": 7.98746290068559e-06,
"loss": 0.2036,
"step": 51000
},
{
"epoch": 2.5162456637514046,
"grad_norm": 28.70236587524414,
"learning_rate": 7.9614738888398e-06,
"loss": 0.2109,
"step": 51500
},
{
"epoch": 2.540675233302389,
"grad_norm": 28.118547439575195,
"learning_rate": 7.935484876994007e-06,
"loss": 0.2022,
"step": 52000
},
{
"epoch": 2.565104802853374,
"grad_norm": 5.818374156951904,
"learning_rate": 7.909495865148216e-06,
"loss": 0.2061,
"step": 52500
},
{
"epoch": 2.5895343724043585,
"grad_norm": 11.427398681640625,
"learning_rate": 7.883506853302424e-06,
"loss": 0.209,
"step": 53000
},
{
"epoch": 2.6139639419553427,
"grad_norm": 16.268341064453125,
"learning_rate": 7.857517841456633e-06,
"loss": 0.2123,
"step": 53500
},
{
"epoch": 2.6383935115063273,
"grad_norm": 23.71199607849121,
"learning_rate": 7.831528829610841e-06,
"loss": 0.2125,
"step": 54000
},
{
"epoch": 2.662823081057312,
"grad_norm": 27.364391326904297,
"learning_rate": 7.80553981776505e-06,
"loss": 0.2014,
"step": 54500
},
{
"epoch": 2.687252650608296,
"grad_norm": 27.82223129272461,
"learning_rate": 7.779550805919258e-06,
"loss": 0.1981,
"step": 55000
},
{
"epoch": 2.7116822201592807,
"grad_norm": 19.91096305847168,
"learning_rate": 7.753561794073467e-06,
"loss": 0.2048,
"step": 55500
},
{
"epoch": 2.7361117897102654,
"grad_norm": 22.202478408813477,
"learning_rate": 7.727572782227675e-06,
"loss": 0.2077,
"step": 56000
},
{
"epoch": 2.76054135926125,
"grad_norm": 15.403483390808105,
"learning_rate": 7.701583770381883e-06,
"loss": 0.2057,
"step": 56500
},
{
"epoch": 2.784970928812234,
"grad_norm": 2.367913007736206,
"learning_rate": 7.675594758536092e-06,
"loss": 0.2097,
"step": 57000
},
{
"epoch": 2.809400498363219,
"grad_norm": 30.33095932006836,
"learning_rate": 7.6496057466903e-06,
"loss": 0.2073,
"step": 57500
},
{
"epoch": 2.8338300679142034,
"grad_norm": 16.65831184387207,
"learning_rate": 7.623616734844508e-06,
"loss": 0.2109,
"step": 58000
},
{
"epoch": 2.8582596374651876,
"grad_norm": 26.70164680480957,
"learning_rate": 7.597627722998717e-06,
"loss": 0.1967,
"step": 58500
},
{
"epoch": 2.8826892070161723,
"grad_norm": 15.616362571716309,
"learning_rate": 7.571638711152926e-06,
"loss": 0.21,
"step": 59000
},
{
"epoch": 2.907118776567157,
"grad_norm": 1.022119164466858,
"learning_rate": 7.5456496993071335e-06,
"loss": 0.2154,
"step": 59500
},
{
"epoch": 2.9315483461181415,
"grad_norm": 23.004011154174805,
"learning_rate": 7.519660687461342e-06,
"loss": 0.2081,
"step": 60000
},
{
"epoch": 2.955977915669126,
"grad_norm": 49.54454040527344,
"learning_rate": 7.4936716756155505e-06,
"loss": 0.2066,
"step": 60500
},
{
"epoch": 2.9804074852201103,
"grad_norm": 22.06740379333496,
"learning_rate": 7.467682663769759e-06,
"loss": 0.2111,
"step": 61000
},
{
"epoch": 3.0,
"eval_accuracy": 0.9057578672529889,
"eval_averaged_scores": 0.8894141918163889,
"eval_f1": 0.873070516379789,
"eval_loss": 0.2923884093761444,
"eval_runtime": 20.2639,
"eval_samples_per_second": 1795.555,
"eval_steps_per_second": 112.268,
"step": 61401
},
{
"epoch": 3.004837054771095,
"grad_norm": 11.454232215881348,
"learning_rate": 7.441693651923967e-06,
"loss": 0.2018,
"step": 61500
},
{
"epoch": 3.0292666243220796,
"grad_norm": 21.45491600036621,
"learning_rate": 7.415704640078175e-06,
"loss": 0.1833,
"step": 62000
},
{
"epoch": 3.053696193873064,
"grad_norm": 0.8129162192344666,
"learning_rate": 7.389715628232384e-06,
"loss": 0.1801,
"step": 62500
},
{
"epoch": 3.0781257634240484,
"grad_norm": 0.3495664894580841,
"learning_rate": 7.363726616386593e-06,
"loss": 0.1776,
"step": 63000
},
{
"epoch": 3.102555332975033,
"grad_norm": 0.13657505810260773,
"learning_rate": 7.337737604540801e-06,
"loss": 0.182,
"step": 63500
},
{
"epoch": 3.1269849025260177,
"grad_norm": 86.07280731201172,
"learning_rate": 7.311748592695009e-06,
"loss": 0.1861,
"step": 64000
},
{
"epoch": 3.151414472077002,
"grad_norm": 24.988689422607422,
"learning_rate": 7.2857595808492175e-06,
"loss": 0.1776,
"step": 64500
},
{
"epoch": 3.1758440416279865,
"grad_norm": 37.55842208862305,
"learning_rate": 7.259770569003426e-06,
"loss": 0.1854,
"step": 65000
},
{
"epoch": 3.200273611178971,
"grad_norm": 69.79829406738281,
"learning_rate": 7.2337815571576345e-06,
"loss": 0.1747,
"step": 65500
},
{
"epoch": 3.2247031807299553,
"grad_norm": 28.17424201965332,
"learning_rate": 7.207792545311842e-06,
"loss": 0.1938,
"step": 66000
},
{
"epoch": 3.24913275028094,
"grad_norm": 56.655364990234375,
"learning_rate": 7.181803533466051e-06,
"loss": 0.1912,
"step": 66500
},
{
"epoch": 3.2735623198319246,
"grad_norm": 0.3666965067386627,
"learning_rate": 7.155814521620259e-06,
"loss": 0.1915,
"step": 67000
},
{
"epoch": 3.297991889382909,
"grad_norm": 9.555418014526367,
"learning_rate": 7.1298255097744685e-06,
"loss": 0.188,
"step": 67500
},
{
"epoch": 3.3224214589338934,
"grad_norm": 21.880718231201172,
"learning_rate": 7.103836497928677e-06,
"loss": 0.1824,
"step": 68000
},
{
"epoch": 3.346851028484878,
"grad_norm": 34.551021575927734,
"learning_rate": 7.077847486082885e-06,
"loss": 0.1886,
"step": 68500
},
{
"epoch": 3.3712805980358627,
"grad_norm": 18.153520584106445,
"learning_rate": 7.051858474237093e-06,
"loss": 0.1908,
"step": 69000
},
{
"epoch": 3.3957101675868473,
"grad_norm": 0.3916667103767395,
"learning_rate": 7.025869462391302e-06,
"loss": 0.182,
"step": 69500
},
{
"epoch": 3.4201397371378315,
"grad_norm": 28.306758880615234,
"learning_rate": 6.99988045054551e-06,
"loss": 0.1852,
"step": 70000
},
{
"epoch": 3.444569306688816,
"grad_norm": 29.383926391601562,
"learning_rate": 6.973891438699718e-06,
"loss": 0.1763,
"step": 70500
},
{
"epoch": 3.4689988762398007,
"grad_norm": 16.787750244140625,
"learning_rate": 6.947902426853926e-06,
"loss": 0.188,
"step": 71000
},
{
"epoch": 3.4934284457907854,
"grad_norm": 22.620866775512695,
"learning_rate": 6.921913415008135e-06,
"loss": 0.193,
"step": 71500
},
{
"epoch": 3.5178580153417696,
"grad_norm": 0.9201492667198181,
"learning_rate": 6.895924403162344e-06,
"loss": 0.186,
"step": 72000
},
{
"epoch": 3.542287584892754,
"grad_norm": 46.33378601074219,
"learning_rate": 6.8699353913165525e-06,
"loss": 0.1973,
"step": 72500
},
{
"epoch": 3.566717154443739,
"grad_norm": 3.13004207611084,
"learning_rate": 6.84394637947076e-06,
"loss": 0.2003,
"step": 73000
},
{
"epoch": 3.591146723994723,
"grad_norm": 29.041542053222656,
"learning_rate": 6.817957367624969e-06,
"loss": 0.1874,
"step": 73500
},
{
"epoch": 3.6155762935457076,
"grad_norm": 9.451910018920898,
"learning_rate": 6.791968355779177e-06,
"loss": 0.1757,
"step": 74000
},
{
"epoch": 3.6400058630966923,
"grad_norm": 0.4655057191848755,
"learning_rate": 6.765979343933386e-06,
"loss": 0.1895,
"step": 74500
},
{
"epoch": 3.664435432647677,
"grad_norm": 31.763437271118164,
"learning_rate": 6.739990332087593e-06,
"loss": 0.1822,
"step": 75000
},
{
"epoch": 3.6888650021986615,
"grad_norm": 38.27184295654297,
"learning_rate": 6.714001320241802e-06,
"loss": 0.1849,
"step": 75500
},
{
"epoch": 3.7132945717496457,
"grad_norm": 6.707937717437744,
"learning_rate": 6.68801230839601e-06,
"loss": 0.2007,
"step": 76000
},
{
"epoch": 3.7377241413006304,
"grad_norm": 30.213747024536133,
"learning_rate": 6.66202329655022e-06,
"loss": 0.1903,
"step": 76500
},
{
"epoch": 3.7621537108516145,
"grad_norm": 51.97517776489258,
"learning_rate": 6.636034284704428e-06,
"loss": 0.1875,
"step": 77000
},
{
"epoch": 3.786583280402599,
"grad_norm": 28.538976669311523,
"learning_rate": 6.610045272858637e-06,
"loss": 0.1857,
"step": 77500
},
{
"epoch": 3.811012849953584,
"grad_norm": 0.3994407057762146,
"learning_rate": 6.584056261012844e-06,
"loss": 0.1819,
"step": 78000
},
{
"epoch": 3.8354424195045684,
"grad_norm": 28.58909797668457,
"learning_rate": 6.558067249167053e-06,
"loss": 0.1787,
"step": 78500
},
{
"epoch": 3.859871989055553,
"grad_norm": 0.4431411325931549,
"learning_rate": 6.532078237321261e-06,
"loss": 0.1864,
"step": 79000
},
{
"epoch": 3.8843015586065373,
"grad_norm": 65.34574890136719,
"learning_rate": 6.50608922547547e-06,
"loss": 0.1844,
"step": 79500
},
{
"epoch": 3.908731128157522,
"grad_norm": 12.750852584838867,
"learning_rate": 6.480100213629677e-06,
"loss": 0.1824,
"step": 80000
},
{
"epoch": 3.9331606977085065,
"grad_norm": 39.058040618896484,
"learning_rate": 6.454111201783886e-06,
"loss": 0.1849,
"step": 80500
},
{
"epoch": 3.9575902672594907,
"grad_norm": 2.713413715362549,
"learning_rate": 6.428122189938095e-06,
"loss": 0.1768,
"step": 81000
},
{
"epoch": 3.9820198368104753,
"grad_norm": 1.6995596885681152,
"learning_rate": 6.402133178092304e-06,
"loss": 0.1835,
"step": 81500
},
{
"epoch": 4.0,
"eval_accuracy": 0.9077641885392332,
"eval_averaged_scores": 0.8907519212071636,
"eval_f1": 0.873739653875094,
"eval_loss": 0.33108219504356384,
"eval_runtime": 20.2844,
"eval_samples_per_second": 1793.747,
"eval_steps_per_second": 112.155,
"step": 81868
},
{
"epoch": 4.00644940636146,
"grad_norm": 0.7421801686286926,
"learning_rate": 6.376144166246512e-06,
"loss": 0.1796,
"step": 82000
},
{
"epoch": 4.030878975912445,
"grad_norm": 30.367263793945312,
"learning_rate": 6.35015515440072e-06,
"loss": 0.1649,
"step": 82500
},
{
"epoch": 4.055308545463429,
"grad_norm": 14.93786907196045,
"learning_rate": 6.324166142554928e-06,
"loss": 0.164,
"step": 83000
},
{
"epoch": 4.079738115014414,
"grad_norm": 0.6399831175804138,
"learning_rate": 6.298177130709137e-06,
"loss": 0.1605,
"step": 83500
},
{
"epoch": 4.104167684565398,
"grad_norm": 23.02483558654785,
"learning_rate": 6.272188118863345e-06,
"loss": 0.1659,
"step": 84000
},
{
"epoch": 4.128597254116382,
"grad_norm": 0.35685986280441284,
"learning_rate": 6.246199107017553e-06,
"loss": 0.1573,
"step": 84500
},
{
"epoch": 4.153026823667367,
"grad_norm": 52.071285247802734,
"learning_rate": 6.220210095171761e-06,
"loss": 0.1683,
"step": 85000
},
{
"epoch": 4.1774563932183515,
"grad_norm": 4.686119556427002,
"learning_rate": 6.194221083325971e-06,
"loss": 0.1641,
"step": 85500
},
{
"epoch": 4.201885962769336,
"grad_norm": 0.708002507686615,
"learning_rate": 6.168232071480179e-06,
"loss": 0.1603,
"step": 86000
},
{
"epoch": 4.226315532320321,
"grad_norm": 1.475074291229248,
"learning_rate": 6.142243059634388e-06,
"loss": 0.1671,
"step": 86500
},
{
"epoch": 4.250745101871305,
"grad_norm": 1.395318627357483,
"learning_rate": 6.116254047788595e-06,
"loss": 0.1575,
"step": 87000
},
{
"epoch": 4.275174671422289,
"grad_norm": 0.3956087827682495,
"learning_rate": 6.090265035942804e-06,
"loss": 0.1591,
"step": 87500
},
{
"epoch": 4.299604240973274,
"grad_norm": 0.6056288480758667,
"learning_rate": 6.064276024097012e-06,
"loss": 0.1614,
"step": 88000
},
{
"epoch": 4.324033810524258,
"grad_norm": 1.0418305397033691,
"learning_rate": 6.038287012251221e-06,
"loss": 0.1715,
"step": 88500
},
{
"epoch": 4.348463380075243,
"grad_norm": 1.0520128011703491,
"learning_rate": 6.0122980004054285e-06,
"loss": 0.1762,
"step": 89000
},
{
"epoch": 4.372892949626228,
"grad_norm": 0.8157211542129517,
"learning_rate": 5.986308988559637e-06,
"loss": 0.1702,
"step": 89500
},
{
"epoch": 4.397322519177212,
"grad_norm": 21.881351470947266,
"learning_rate": 5.9603199767138455e-06,
"loss": 0.1649,
"step": 90000
},
{
"epoch": 4.421752088728197,
"grad_norm": 21.02885627746582,
"learning_rate": 5.934330964868055e-06,
"loss": 0.1681,
"step": 90500
},
{
"epoch": 4.4461816582791815,
"grad_norm": 17.863567352294922,
"learning_rate": 5.908341953022263e-06,
"loss": 0.1705,
"step": 91000
},
{
"epoch": 4.470611227830165,
"grad_norm": 66.63270568847656,
"learning_rate": 5.882352941176471e-06,
"loss": 0.1674,
"step": 91500
},
{
"epoch": 4.49504079738115,
"grad_norm": 14.567200660705566,
"learning_rate": 5.856363929330679e-06,
"loss": 0.1612,
"step": 92000
},
{
"epoch": 4.5194703669321346,
"grad_norm": 2.0841243267059326,
"learning_rate": 5.830374917484888e-06,
"loss": 0.1588,
"step": 92500
},
{
"epoch": 4.543899936483119,
"grad_norm": 21.195947647094727,
"learning_rate": 5.804385905639096e-06,
"loss": 0.1686,
"step": 93000
},
{
"epoch": 4.568329506034104,
"grad_norm": 19.75173568725586,
"learning_rate": 5.778396893793304e-06,
"loss": 0.1718,
"step": 93500
},
{
"epoch": 4.5927590755850884,
"grad_norm": 34.48356628417969,
"learning_rate": 5.7524078819475125e-06,
"loss": 0.1693,
"step": 94000
},
{
"epoch": 4.617188645136073,
"grad_norm": 66.5281982421875,
"learning_rate": 5.726418870101721e-06,
"loss": 0.1654,
"step": 94500
},
{
"epoch": 4.641618214687057,
"grad_norm": 21.60127830505371,
"learning_rate": 5.70042985825593e-06,
"loss": 0.1525,
"step": 95000
},
{
"epoch": 4.6660477842380415,
"grad_norm": 53.85319900512695,
"learning_rate": 5.674440846410139e-06,
"loss": 0.1683,
"step": 95500
},
{
"epoch": 4.690477353789026,
"grad_norm": 21.81485366821289,
"learning_rate": 5.6484518345643465e-06,
"loss": 0.1597,
"step": 96000
},
{
"epoch": 4.714906923340011,
"grad_norm": 3.4359285831451416,
"learning_rate": 5.622462822718555e-06,
"loss": 0.1744,
"step": 96500
},
{
"epoch": 4.739336492890995,
"grad_norm": 37.74496078491211,
"learning_rate": 5.5964738108727635e-06,
"loss": 0.1549,
"step": 97000
},
{
"epoch": 4.76376606244198,
"grad_norm": 46.64603042602539,
"learning_rate": 5.570484799026972e-06,
"loss": 0.1628,
"step": 97500
},
{
"epoch": 4.788195631992965,
"grad_norm": 1.0236244201660156,
"learning_rate": 5.54449578718118e-06,
"loss": 0.1543,
"step": 98000
},
{
"epoch": 4.812625201543948,
"grad_norm": 75.760009765625,
"learning_rate": 5.518506775335388e-06,
"loss": 0.1788,
"step": 98500
},
{
"epoch": 4.837054771094933,
"grad_norm": 1.3948771953582764,
"learning_rate": 5.4925177634895966e-06,
"loss": 0.1675,
"step": 99000
},
{
"epoch": 4.861484340645918,
"grad_norm": 25.799287796020508,
"learning_rate": 5.466528751643806e-06,
"loss": 0.1756,
"step": 99500
},
{
"epoch": 4.885913910196902,
"grad_norm": 10.39145278930664,
"learning_rate": 5.440539739798014e-06,
"loss": 0.168,
"step": 100000
},
{
"epoch": 4.910343479747887,
"grad_norm": 0.5542752742767334,
"learning_rate": 5.414550727952223e-06,
"loss": 0.158,
"step": 100500
},
{
"epoch": 4.9347730492988715,
"grad_norm": 2.8913373947143555,
"learning_rate": 5.3885617161064305e-06,
"loss": 0.1703,
"step": 101000
},
{
"epoch": 4.959202618849856,
"grad_norm": 76.17244720458984,
"learning_rate": 5.362572704260639e-06,
"loss": 0.1571,
"step": 101500
},
{
"epoch": 4.98363218840084,
"grad_norm": 29.392118453979492,
"learning_rate": 5.3365836924148475e-06,
"loss": 0.1671,
"step": 102000
},
{
"epoch": 5.0,
"eval_accuracy": 0.9093857358801704,
"eval_averaged_scores": 0.893972399414541,
"eval_f1": 0.8785590629489115,
"eval_loss": 0.37774011492729187,
"eval_runtime": 20.1329,
"eval_samples_per_second": 1807.24,
"eval_steps_per_second": 112.999,
"step": 102335
},
{
"epoch": 5.0080617579518245,
"grad_norm": 0.33907392621040344,
"learning_rate": 5.310594680569056e-06,
"loss": 0.1594,
"step": 102500
},
{
"epoch": 5.032491327502809,
"grad_norm": 0.20179282128810883,
"learning_rate": 5.284605668723264e-06,
"loss": 0.1234,
"step": 103000
},
{
"epoch": 5.056920897053794,
"grad_norm": 0.2239135205745697,
"learning_rate": 5.258616656877472e-06,
"loss": 0.1398,
"step": 103500
},
{
"epoch": 5.081350466604778,
"grad_norm": 0.2501750886440277,
"learning_rate": 5.2326276450316815e-06,
"loss": 0.141,
"step": 104000
},
{
"epoch": 5.105780036155763,
"grad_norm": 27.644052505493164,
"learning_rate": 5.20663863318589e-06,
"loss": 0.1447,
"step": 104500
},
{
"epoch": 5.130209605706748,
"grad_norm": 0.7906270623207092,
"learning_rate": 5.1806496213400985e-06,
"loss": 0.1368,
"step": 105000
},
{
"epoch": 5.154639175257732,
"grad_norm": 0.17316019535064697,
"learning_rate": 5.154660609494306e-06,
"loss": 0.131,
"step": 105500
},
{
"epoch": 5.179068744808716,
"grad_norm": 15.188929557800293,
"learning_rate": 5.128671597648515e-06,
"loss": 0.1351,
"step": 106000
},
{
"epoch": 5.203498314359701,
"grad_norm": 0.39822831749916077,
"learning_rate": 5.102682585802723e-06,
"loss": 0.1424,
"step": 106500
},
{
"epoch": 5.227927883910685,
"grad_norm": 0.22537678480148315,
"learning_rate": 5.0766935739569316e-06,
"loss": 0.1369,
"step": 107000
},
{
"epoch": 5.25235745346167,
"grad_norm": 1.028478741645813,
"learning_rate": 5.050704562111139e-06,
"loss": 0.1358,
"step": 107500
},
{
"epoch": 5.276787023012655,
"grad_norm": 23.18048858642578,
"learning_rate": 5.024715550265348e-06,
"loss": 0.1335,
"step": 108000
},
{
"epoch": 5.301216592563639,
"grad_norm": 22.66480255126953,
"learning_rate": 4.998726538419556e-06,
"loss": 0.1358,
"step": 108500
},
{
"epoch": 5.325646162114624,
"grad_norm": 1.0610641241073608,
"learning_rate": 4.972737526573765e-06,
"loss": 0.1385,
"step": 109000
},
{
"epoch": 5.3500757316656085,
"grad_norm": 0.12008700519800186,
"learning_rate": 4.946748514727974e-06,
"loss": 0.1462,
"step": 109500
},
{
"epoch": 5.374505301216592,
"grad_norm": 0.47721561789512634,
"learning_rate": 4.920759502882182e-06,
"loss": 0.1407,
"step": 110000
},
{
"epoch": 5.398934870767577,
"grad_norm": 0.9414154291152954,
"learning_rate": 4.89477049103639e-06,
"loss": 0.1435,
"step": 110500
},
{
"epoch": 5.4233644403185615,
"grad_norm": 260.96466064453125,
"learning_rate": 4.868781479190599e-06,
"loss": 0.1319,
"step": 111000
},
{
"epoch": 5.447794009869546,
"grad_norm": 0.2496846616268158,
"learning_rate": 4.842792467344807e-06,
"loss": 0.1544,
"step": 111500
},
{
"epoch": 5.472223579420531,
"grad_norm": 57.217315673828125,
"learning_rate": 4.816803455499016e-06,
"loss": 0.1328,
"step": 112000
},
{
"epoch": 5.496653148971515,
"grad_norm": 0.7028847932815552,
"learning_rate": 4.790814443653224e-06,
"loss": 0.1396,
"step": 112500
},
{
"epoch": 5.5210827185225,
"grad_norm": 0.15381655097007751,
"learning_rate": 4.764825431807433e-06,
"loss": 0.1384,
"step": 113000
},
{
"epoch": 5.545512288073484,
"grad_norm": 1.1577835083007812,
"learning_rate": 4.73883641996164e-06,
"loss": 0.1447,
"step": 113500
},
{
"epoch": 5.569941857624468,
"grad_norm": 0.1358271986246109,
"learning_rate": 4.712847408115849e-06,
"loss": 0.1311,
"step": 114000
},
{
"epoch": 5.594371427175453,
"grad_norm": 44.117523193359375,
"learning_rate": 4.686858396270057e-06,
"loss": 0.1533,
"step": 114500
},
{
"epoch": 5.618800996726438,
"grad_norm": 1.6837427616119385,
"learning_rate": 4.660869384424266e-06,
"loss": 0.1392,
"step": 115000
},
{
"epoch": 5.643230566277422,
"grad_norm": 0.5804646611213684,
"learning_rate": 4.634880372578474e-06,
"loss": 0.1416,
"step": 115500
},
{
"epoch": 5.667660135828407,
"grad_norm": 6.186709880828857,
"learning_rate": 4.608891360732683e-06,
"loss": 0.1377,
"step": 116000
},
{
"epoch": 5.6920897053793915,
"grad_norm": 3.6101438999176025,
"learning_rate": 4.582902348886891e-06,
"loss": 0.1355,
"step": 116500
},
{
"epoch": 5.716519274930375,
"grad_norm": 59.04292297363281,
"learning_rate": 4.5569133370411e-06,
"loss": 0.1418,
"step": 117000
},
{
"epoch": 5.74094884448136,
"grad_norm": 103.19622802734375,
"learning_rate": 4.530924325195308e-06,
"loss": 0.1385,
"step": 117500
},
{
"epoch": 5.7653784140323445,
"grad_norm": 0.046471111476421356,
"learning_rate": 4.504935313349516e-06,
"loss": 0.1437,
"step": 118000
},
{
"epoch": 5.789807983583329,
"grad_norm": 14.723210334777832,
"learning_rate": 4.478946301503724e-06,
"loss": 0.1489,
"step": 118500
},
{
"epoch": 5.814237553134314,
"grad_norm": 0.23389513790607452,
"learning_rate": 4.452957289657933e-06,
"loss": 0.1368,
"step": 119000
},
{
"epoch": 5.838667122685298,
"grad_norm": 52.34353256225586,
"learning_rate": 4.426968277812141e-06,
"loss": 0.1366,
"step": 119500
},
{
"epoch": 5.863096692236283,
"grad_norm": 0.24756839871406555,
"learning_rate": 4.40097926596635e-06,
"loss": 0.1466,
"step": 120000
},
{
"epoch": 5.887526261787268,
"grad_norm": 12.284710884094238,
"learning_rate": 4.374990254120558e-06,
"loss": 0.1477,
"step": 120500
},
{
"epoch": 5.911955831338252,
"grad_norm": 4.85374116897583,
"learning_rate": 4.349001242274767e-06,
"loss": 0.1406,
"step": 121000
},
{
"epoch": 5.936385400889236,
"grad_norm": 23.76519012451172,
"learning_rate": 4.323012230428975e-06,
"loss": 0.1434,
"step": 121500
},
{
"epoch": 5.960814970440221,
"grad_norm": 0.7099828720092773,
"learning_rate": 4.297023218583184e-06,
"loss": 0.1524,
"step": 122000
},
{
"epoch": 5.985244539991205,
"grad_norm": 30.665578842163086,
"learning_rate": 4.271034206737391e-06,
"loss": 0.1381,
"step": 122500
},
{
"epoch": 6.0,
"eval_accuracy": 0.9105950254225642,
"eval_averaged_scores": 0.8952502575704396,
"eval_f1": 0.8799054897183151,
"eval_loss": 0.44903886318206787,
"eval_runtime": 20.1055,
"eval_samples_per_second": 1809.706,
"eval_steps_per_second": 113.153,
"step": 122802
},
{
"epoch": 6.00967410954219,
"grad_norm": 21.405393600463867,
"learning_rate": 4.2450451948916e-06,
"loss": 0.1166,
"step": 123000
},
{
"epoch": 6.034103679093175,
"grad_norm": 8.164361953735352,
"learning_rate": 4.219056183045809e-06,
"loss": 0.1086,
"step": 123500
},
{
"epoch": 6.058533248644159,
"grad_norm": 0.2232547402381897,
"learning_rate": 4.193067171200017e-06,
"loss": 0.1152,
"step": 124000
},
{
"epoch": 6.082962818195144,
"grad_norm": 72.73697662353516,
"learning_rate": 4.167078159354225e-06,
"loss": 0.109,
"step": 124500
},
{
"epoch": 6.107392387746128,
"grad_norm": 0.5123595595359802,
"learning_rate": 4.141089147508434e-06,
"loss": 0.1183,
"step": 125000
},
{
"epoch": 6.131821957297112,
"grad_norm": 0.04295356571674347,
"learning_rate": 4.115100135662642e-06,
"loss": 0.1102,
"step": 125500
},
{
"epoch": 6.156251526848097,
"grad_norm": 17.82163429260254,
"learning_rate": 4.089111123816851e-06,
"loss": 0.1059,
"step": 126000
},
{
"epoch": 6.1806810963990815,
"grad_norm": 0.28495362401008606,
"learning_rate": 4.063122111971059e-06,
"loss": 0.1053,
"step": 126500
},
{
"epoch": 6.205110665950066,
"grad_norm": 9.597000122070312,
"learning_rate": 4.037133100125267e-06,
"loss": 0.1244,
"step": 127000
},
{
"epoch": 6.229540235501051,
"grad_norm": 0.0872587263584137,
"learning_rate": 4.011144088279475e-06,
"loss": 0.1191,
"step": 127500
},
{
"epoch": 6.253969805052035,
"grad_norm": 22.000926971435547,
"learning_rate": 3.985155076433685e-06,
"loss": 0.1243,
"step": 128000
},
{
"epoch": 6.278399374603019,
"grad_norm": 9.766866683959961,
"learning_rate": 3.959166064587892e-06,
"loss": 0.1202,
"step": 128500
},
{
"epoch": 6.302828944154004,
"grad_norm": 12.51276969909668,
"learning_rate": 3.933177052742101e-06,
"loss": 0.1167,
"step": 129000
},
{
"epoch": 6.327258513704988,
"grad_norm": 0.99669349193573,
"learning_rate": 3.907188040896309e-06,
"loss": 0.1157,
"step": 129500
},
{
"epoch": 6.351688083255973,
"grad_norm": 0.4419931471347809,
"learning_rate": 3.881199029050518e-06,
"loss": 0.113,
"step": 130000
},
{
"epoch": 6.376117652806958,
"grad_norm": 0.0723571702837944,
"learning_rate": 3.855210017204726e-06,
"loss": 0.1239,
"step": 130500
},
{
"epoch": 6.400547222357942,
"grad_norm": 67.760009765625,
"learning_rate": 3.829221005358935e-06,
"loss": 0.1206,
"step": 131000
},
{
"epoch": 6.424976791908927,
"grad_norm": 145.9121551513672,
"learning_rate": 3.803231993513143e-06,
"loss": 0.1153,
"step": 131500
},
{
"epoch": 6.449406361459911,
"grad_norm": 14.276485443115234,
"learning_rate": 3.777242981667351e-06,
"loss": 0.1255,
"step": 132000
},
{
"epoch": 6.473835931010895,
"grad_norm": 0.9386702179908752,
"learning_rate": 3.75125396982156e-06,
"loss": 0.1196,
"step": 132500
},
{
"epoch": 6.49826550056188,
"grad_norm": 0.18407511711120605,
"learning_rate": 3.7252649579757684e-06,
"loss": 0.1138,
"step": 133000
},
{
"epoch": 6.5226950701128645,
"grad_norm": 0.8368340730667114,
"learning_rate": 3.6992759461299764e-06,
"loss": 0.1273,
"step": 133500
},
{
"epoch": 6.547124639663849,
"grad_norm": 8.838400840759277,
"learning_rate": 3.673286934284185e-06,
"loss": 0.1103,
"step": 134000
},
{
"epoch": 6.571554209214834,
"grad_norm": 0.08394081890583038,
"learning_rate": 3.647297922438393e-06,
"loss": 0.1205,
"step": 134500
},
{
"epoch": 6.595983778765818,
"grad_norm": 0.11461492627859116,
"learning_rate": 3.621308910592602e-06,
"loss": 0.1103,
"step": 135000
},
{
"epoch": 6.620413348316802,
"grad_norm": 0.16645289957523346,
"learning_rate": 3.5953198987468104e-06,
"loss": 0.1154,
"step": 135500
},
{
"epoch": 6.644842917867787,
"grad_norm": 288.46868896484375,
"learning_rate": 3.5693308869010185e-06,
"loss": 0.1281,
"step": 136000
},
{
"epoch": 6.669272487418771,
"grad_norm": 0.03742976859211922,
"learning_rate": 3.543341875055227e-06,
"loss": 0.1225,
"step": 136500
},
{
"epoch": 6.693702056969756,
"grad_norm": 0.650506854057312,
"learning_rate": 3.5173528632094355e-06,
"loss": 0.1271,
"step": 137000
},
{
"epoch": 6.718131626520741,
"grad_norm": 54.199398040771484,
"learning_rate": 3.491363851363644e-06,
"loss": 0.1315,
"step": 137500
},
{
"epoch": 6.742561196071725,
"grad_norm": 23.973060607910156,
"learning_rate": 3.465374839517852e-06,
"loss": 0.1241,
"step": 138000
},
{
"epoch": 6.76699076562271,
"grad_norm": 0.1303454339504242,
"learning_rate": 3.4393858276720605e-06,
"loss": 0.1214,
"step": 138500
},
{
"epoch": 6.791420335173695,
"grad_norm": 0.046723511070013046,
"learning_rate": 3.4133968158262686e-06,
"loss": 0.1143,
"step": 139000
},
{
"epoch": 6.815849904724679,
"grad_norm": 6.235154628753662,
"learning_rate": 3.3874078039804775e-06,
"loss": 0.124,
"step": 139500
},
{
"epoch": 6.840279474275663,
"grad_norm": 0.06593639403581619,
"learning_rate": 3.361418792134686e-06,
"loss": 0.122,
"step": 140000
},
{
"epoch": 6.864709043826648,
"grad_norm": 0.6418370604515076,
"learning_rate": 3.335429780288894e-06,
"loss": 0.1232,
"step": 140500
},
{
"epoch": 6.889138613377632,
"grad_norm": 0.2320944368839264,
"learning_rate": 3.3094407684431025e-06,
"loss": 0.1232,
"step": 141000
},
{
"epoch": 6.913568182928617,
"grad_norm": 30.253524780273438,
"learning_rate": 3.283451756597311e-06,
"loss": 0.1298,
"step": 141500
},
{
"epoch": 6.9379977524796015,
"grad_norm": 261.870849609375,
"learning_rate": 3.2574627447515195e-06,
"loss": 0.1192,
"step": 142000
},
{
"epoch": 6.962427322030586,
"grad_norm": 0.349097341299057,
"learning_rate": 3.2314737329057276e-06,
"loss": 0.1236,
"step": 142500
},
{
"epoch": 6.986856891581571,
"grad_norm": 0.3783528208732605,
"learning_rate": 3.205484721059936e-06,
"loss": 0.1148,
"step": 143000
},
{
"epoch": 7.0,
"eval_accuracy": 0.9117218634052494,
"eval_averaged_scores": 0.8971449328725574,
"eval_f1": 0.8825680023398654,
"eval_loss": 0.467751681804657,
"eval_runtime": 20.1427,
"eval_samples_per_second": 1806.36,
"eval_steps_per_second": 112.944,
"step": 143269
},
{
"epoch": 7.0112864611325545,
"grad_norm": 1.4202431440353394,
"learning_rate": 3.179495709214144e-06,
"loss": 0.1088,
"step": 143500
},
{
"epoch": 7.035716030683539,
"grad_norm": 0.1592591255903244,
"learning_rate": 3.153506697368353e-06,
"loss": 0.0945,
"step": 144000
},
{
"epoch": 7.060145600234524,
"grad_norm": 8.698958396911621,
"learning_rate": 3.1275176855225615e-06,
"loss": 0.0984,
"step": 144500
},
{
"epoch": 7.084575169785508,
"grad_norm": 0.0554330088198185,
"learning_rate": 3.1015286736767696e-06,
"loss": 0.0925,
"step": 145000
},
{
"epoch": 7.109004739336493,
"grad_norm": 0.3783748745918274,
"learning_rate": 3.075539661830978e-06,
"loss": 0.0914,
"step": 145500
},
{
"epoch": 7.133434308887478,
"grad_norm": 0.33608478307724,
"learning_rate": 3.049550649985186e-06,
"loss": 0.0978,
"step": 146000
},
{
"epoch": 7.157863878438462,
"grad_norm": 0.36914902925491333,
"learning_rate": 3.023561638139395e-06,
"loss": 0.0923,
"step": 146500
},
{
"epoch": 7.182293447989446,
"grad_norm": 0.05221285670995712,
"learning_rate": 2.9975726262936036e-06,
"loss": 0.0939,
"step": 147000
},
{
"epoch": 7.206723017540431,
"grad_norm": 0.09707313030958176,
"learning_rate": 2.9715836144478116e-06,
"loss": 0.0925,
"step": 147500
},
{
"epoch": 7.231152587091415,
"grad_norm": 0.21917091310024261,
"learning_rate": 2.94559460260202e-06,
"loss": 0.0947,
"step": 148000
},
{
"epoch": 7.2555821566424,
"grad_norm": 0.08681820333003998,
"learning_rate": 2.9196055907562286e-06,
"loss": 0.0965,
"step": 148500
},
{
"epoch": 7.2800117261933845,
"grad_norm": 0.27925539016723633,
"learning_rate": 2.893616578910437e-06,
"loss": 0.1044,
"step": 149000
},
{
"epoch": 7.304441295744369,
"grad_norm": 33.41240692138672,
"learning_rate": 2.867627567064645e-06,
"loss": 0.0968,
"step": 149500
},
{
"epoch": 7.328870865295354,
"grad_norm": 142.32989501953125,
"learning_rate": 2.8416385552188536e-06,
"loss": 0.0998,
"step": 150000
},
{
"epoch": 7.3533004348463376,
"grad_norm": 0.03225061669945717,
"learning_rate": 2.8156495433730617e-06,
"loss": 0.1013,
"step": 150500
},
{
"epoch": 7.377730004397322,
"grad_norm": 0.08403612673282623,
"learning_rate": 2.7896605315272706e-06,
"loss": 0.1009,
"step": 151000
},
{
"epoch": 7.402159573948307,
"grad_norm": 0.3820052742958069,
"learning_rate": 2.763671519681479e-06,
"loss": 0.0963,
"step": 151500
},
{
"epoch": 7.4265891434992914,
"grad_norm": 16.261816024780273,
"learning_rate": 2.737682507835687e-06,
"loss": 0.1029,
"step": 152000
},
{
"epoch": 7.451018713050276,
"grad_norm": 0.331718385219574,
"learning_rate": 2.7116934959898957e-06,
"loss": 0.1041,
"step": 152500
},
{
"epoch": 7.475448282601261,
"grad_norm": 1.3978043794631958,
"learning_rate": 2.685704484144104e-06,
"loss": 0.0921,
"step": 153000
},
{
"epoch": 7.499877852152245,
"grad_norm": 0.04950063303112984,
"learning_rate": 2.6597154722983127e-06,
"loss": 0.0959,
"step": 153500
},
{
"epoch": 7.52430742170323,
"grad_norm": 0.14950354397296906,
"learning_rate": 2.6337264604525207e-06,
"loss": 0.0886,
"step": 154000
},
{
"epoch": 7.548736991254214,
"grad_norm": 37.28718566894531,
"learning_rate": 2.607737448606729e-06,
"loss": 0.1035,
"step": 154500
},
{
"epoch": 7.573166560805198,
"grad_norm": 0.17789612710475922,
"learning_rate": 2.5817484367609373e-06,
"loss": 0.0945,
"step": 155000
},
{
"epoch": 7.597596130356183,
"grad_norm": 0.2735101580619812,
"learning_rate": 2.555759424915146e-06,
"loss": 0.0996,
"step": 155500
},
{
"epoch": 7.622025699907168,
"grad_norm": 162.47628784179688,
"learning_rate": 2.5297704130693547e-06,
"loss": 0.1001,
"step": 156000
},
{
"epoch": 7.646455269458152,
"grad_norm": 0.21928128600120544,
"learning_rate": 2.5037814012235627e-06,
"loss": 0.1058,
"step": 156500
},
{
"epoch": 7.670884839009137,
"grad_norm": 8.425532341003418,
"learning_rate": 2.4777923893777712e-06,
"loss": 0.0884,
"step": 157000
},
{
"epoch": 7.6953144085601215,
"grad_norm": 0.1196184754371643,
"learning_rate": 2.4518033775319797e-06,
"loss": 0.1073,
"step": 157500
},
{
"epoch": 7.719743978111106,
"grad_norm": 18.718048095703125,
"learning_rate": 2.425814365686188e-06,
"loss": 0.0884,
"step": 158000
},
{
"epoch": 7.74417354766209,
"grad_norm": 37.17155075073242,
"learning_rate": 2.3998253538403967e-06,
"loss": 0.0945,
"step": 158500
},
{
"epoch": 7.7686031172130745,
"grad_norm": 0.38690435886383057,
"learning_rate": 2.3738363419946048e-06,
"loss": 0.0976,
"step": 159000
},
{
"epoch": 7.793032686764059,
"grad_norm": 13.13317584991455,
"learning_rate": 2.3478473301488133e-06,
"loss": 0.0986,
"step": 159500
},
{
"epoch": 7.817462256315044,
"grad_norm": 0.05163729190826416,
"learning_rate": 2.3218583183030217e-06,
"loss": 0.1,
"step": 160000
},
{
"epoch": 7.841891825866028,
"grad_norm": 25.491252899169922,
"learning_rate": 2.2958693064572302e-06,
"loss": 0.0874,
"step": 160500
},
{
"epoch": 7.866321395417013,
"grad_norm": 0.3773481547832489,
"learning_rate": 2.2698802946114383e-06,
"loss": 0.1027,
"step": 161000
},
{
"epoch": 7.890750964967998,
"grad_norm": 13.568100929260254,
"learning_rate": 2.243891282765647e-06,
"loss": 0.1062,
"step": 161500
},
{
"epoch": 7.915180534518981,
"grad_norm": 24.115978240966797,
"learning_rate": 2.2179022709198553e-06,
"loss": 0.0934,
"step": 162000
},
{
"epoch": 7.939610104069966,
"grad_norm": 0.012055924162268639,
"learning_rate": 2.1919132590740634e-06,
"loss": 0.0854,
"step": 162500
},
{
"epoch": 7.964039673620951,
"grad_norm": 0.3433281481266022,
"learning_rate": 2.1659242472282723e-06,
"loss": 0.1038,
"step": 163000
},
{
"epoch": 7.988469243171935,
"grad_norm": 0.2032276839017868,
"learning_rate": 2.1399352353824803e-06,
"loss": 0.0992,
"step": 163500
},
{
"epoch": 8.0,
"eval_accuracy": 0.9103476707434383,
"eval_averaged_scores": 0.8959594874193519,
"eval_f1": 0.8815713040952657,
"eval_loss": 0.5451585650444031,
"eval_runtime": 20.179,
"eval_samples_per_second": 1803.108,
"eval_steps_per_second": 112.741,
"step": 163736
},
{
"epoch": 8.01289881272292,
"grad_norm": 0.012172297574579716,
"learning_rate": 2.113946223536689e-06,
"loss": 0.0873,
"step": 164000
},
{
"epoch": 8.037328382273904,
"grad_norm": 0.4155900180339813,
"learning_rate": 2.0879572116908973e-06,
"loss": 0.0777,
"step": 164500
},
{
"epoch": 8.06175795182489,
"grad_norm": 0.19762279093265533,
"learning_rate": 2.061968199845106e-06,
"loss": 0.0829,
"step": 165000
},
{
"epoch": 8.086187521375873,
"grad_norm": 0.1475783735513687,
"learning_rate": 2.035979187999314e-06,
"loss": 0.0707,
"step": 165500
},
{
"epoch": 8.110617090926858,
"grad_norm": 145.56173706054688,
"learning_rate": 2.0099901761535224e-06,
"loss": 0.0751,
"step": 166000
},
{
"epoch": 8.135046660477842,
"grad_norm": 0.041457872837781906,
"learning_rate": 1.984001164307731e-06,
"loss": 0.0671,
"step": 166500
},
{
"epoch": 8.159476230028828,
"grad_norm": 5.121572971343994,
"learning_rate": 1.9580121524619393e-06,
"loss": 0.078,
"step": 167000
},
{
"epoch": 8.183905799579811,
"grad_norm": 0.2376221865415573,
"learning_rate": 1.932023140616148e-06,
"loss": 0.0842,
"step": 167500
},
{
"epoch": 8.208335369130795,
"grad_norm": 27.775150299072266,
"learning_rate": 1.9060341287703559e-06,
"loss": 0.0767,
"step": 168000
},
{
"epoch": 8.23276493868178,
"grad_norm": 0.18645720183849335,
"learning_rate": 1.8800451169245646e-06,
"loss": 0.0802,
"step": 168500
},
{
"epoch": 8.257194508232764,
"grad_norm": 7.003711700439453,
"learning_rate": 1.8540561050787729e-06,
"loss": 0.0803,
"step": 169000
},
{
"epoch": 8.28162407778375,
"grad_norm": 44.9241828918457,
"learning_rate": 1.8280670932329814e-06,
"loss": 0.08,
"step": 169500
},
{
"epoch": 8.306053647334734,
"grad_norm": 0.3100529909133911,
"learning_rate": 1.8020780813871896e-06,
"loss": 0.0807,
"step": 170000
},
{
"epoch": 8.33048321688572,
"grad_norm": 1.1184957027435303,
"learning_rate": 1.776089069541398e-06,
"loss": 0.0744,
"step": 170500
},
{
"epoch": 8.354912786436703,
"grad_norm": 0.22304557263851166,
"learning_rate": 1.7501000576956064e-06,
"loss": 0.0784,
"step": 171000
},
{
"epoch": 8.379342355987687,
"grad_norm": 0.036572836339473724,
"learning_rate": 1.7241110458498147e-06,
"loss": 0.077,
"step": 171500
},
{
"epoch": 8.403771925538672,
"grad_norm": 0.1464405059814453,
"learning_rate": 1.6981220340040234e-06,
"loss": 0.0834,
"step": 172000
},
{
"epoch": 8.428201495089656,
"grad_norm": 0.08662763237953186,
"learning_rate": 1.6721330221582317e-06,
"loss": 0.0787,
"step": 172500
},
{
"epoch": 8.452631064640642,
"grad_norm": 1.44623601436615,
"learning_rate": 1.6461440103124402e-06,
"loss": 0.0726,
"step": 173000
},
{
"epoch": 8.477060634191625,
"grad_norm": 14.024663925170898,
"learning_rate": 1.6201549984666484e-06,
"loss": 0.0907,
"step": 173500
},
{
"epoch": 8.50149020374261,
"grad_norm": 0.02955411933362484,
"learning_rate": 1.5941659866208567e-06,
"loss": 0.0836,
"step": 174000
},
{
"epoch": 8.525919773293595,
"grad_norm": 0.1095781922340393,
"learning_rate": 1.5681769747750652e-06,
"loss": 0.0881,
"step": 174500
},
{
"epoch": 8.550349342844578,
"grad_norm": 0.8175545930862427,
"learning_rate": 1.5421879629292735e-06,
"loss": 0.0858,
"step": 175000
},
{
"epoch": 8.574778912395564,
"grad_norm": 0.0535324402153492,
"learning_rate": 1.5161989510834822e-06,
"loss": 0.0781,
"step": 175500
},
{
"epoch": 8.599208481946548,
"grad_norm": 0.0196294616907835,
"learning_rate": 1.4902099392376905e-06,
"loss": 0.0991,
"step": 176000
},
{
"epoch": 8.623638051497533,
"grad_norm": 3.7140159606933594,
"learning_rate": 1.464220927391899e-06,
"loss": 0.0747,
"step": 176500
},
{
"epoch": 8.648067621048517,
"grad_norm": 101.7074966430664,
"learning_rate": 1.4382319155461072e-06,
"loss": 0.0914,
"step": 177000
},
{
"epoch": 8.672497190599502,
"grad_norm": 0.013971562497317791,
"learning_rate": 1.4122429037003157e-06,
"loss": 0.0681,
"step": 177500
},
{
"epoch": 8.696926760150486,
"grad_norm": 164.79086303710938,
"learning_rate": 1.386253891854524e-06,
"loss": 0.0789,
"step": 178000
},
{
"epoch": 8.721356329701472,
"grad_norm": 0.1486426293849945,
"learning_rate": 1.3602648800087323e-06,
"loss": 0.0847,
"step": 178500
},
{
"epoch": 8.745785899252455,
"grad_norm": 32.686614990234375,
"learning_rate": 1.3342758681629408e-06,
"loss": 0.0914,
"step": 179000
},
{
"epoch": 8.770215468803439,
"grad_norm": 0.8406556844711304,
"learning_rate": 1.308286856317149e-06,
"loss": 0.0742,
"step": 179500
},
{
"epoch": 8.794645038354425,
"grad_norm": 0.07689370214939117,
"learning_rate": 1.2822978444713577e-06,
"loss": 0.0791,
"step": 180000
},
{
"epoch": 8.819074607905408,
"grad_norm": 17.384950637817383,
"learning_rate": 1.256308832625566e-06,
"loss": 0.0764,
"step": 180500
},
{
"epoch": 8.843504177456394,
"grad_norm": 29.185670852661133,
"learning_rate": 1.2303198207797743e-06,
"loss": 0.0862,
"step": 181000
},
{
"epoch": 8.867933747007378,
"grad_norm": 58.11179733276367,
"learning_rate": 1.2043308089339828e-06,
"loss": 0.0862,
"step": 181500
},
{
"epoch": 8.892363316558363,
"grad_norm": 0.12057217955589294,
"learning_rate": 1.1783417970881913e-06,
"loss": 0.08,
"step": 182000
},
{
"epoch": 8.916792886109347,
"grad_norm": 67.25086212158203,
"learning_rate": 1.1523527852423996e-06,
"loss": 0.0881,
"step": 182500
},
{
"epoch": 8.94122245566033,
"grad_norm": 0.04171622171998024,
"learning_rate": 1.126363773396608e-06,
"loss": 0.0831,
"step": 183000
},
{
"epoch": 8.965652025211316,
"grad_norm": 0.3835027515888214,
"learning_rate": 1.1003747615508165e-06,
"loss": 0.0768,
"step": 183500
},
{
"epoch": 8.9900815947623,
"grad_norm": 0.16164089739322662,
"learning_rate": 1.0743857497050248e-06,
"loss": 0.0847,
"step": 184000
},
{
"epoch": 9.0,
"eval_accuracy": 0.9122440566167377,
"eval_averaged_scores": 0.8971997179189981,
"eval_f1": 0.8821553792212585,
"eval_loss": 0.5808023810386658,
"eval_runtime": 20.2131,
"eval_samples_per_second": 1800.067,
"eval_steps_per_second": 112.551,
"step": 184203
},
{
"epoch": 9.014511164313285,
"grad_norm": 0.031089797616004944,
"learning_rate": 1.048396737859233e-06,
"loss": 0.072,
"step": 184500
},
{
"epoch": 9.038940733864269,
"grad_norm": 0.0398230142891407,
"learning_rate": 1.0224077260134416e-06,
"loss": 0.0608,
"step": 185000
},
{
"epoch": 9.063370303415255,
"grad_norm": 0.15601158142089844,
"learning_rate": 9.9641871416765e-07,
"loss": 0.0689,
"step": 185500
},
{
"epoch": 9.087799872966238,
"grad_norm": 1.7415249347686768,
"learning_rate": 9.704297023218584e-07,
"loss": 0.0676,
"step": 186000
},
{
"epoch": 9.112229442517222,
"grad_norm": 0.036363422870635986,
"learning_rate": 9.444406904760668e-07,
"loss": 0.0696,
"step": 186500
},
{
"epoch": 9.136659012068208,
"grad_norm": 16.439847946166992,
"learning_rate": 9.184516786302752e-07,
"loss": 0.0704,
"step": 187000
},
{
"epoch": 9.161088581619191,
"grad_norm": 0.16665898263454437,
"learning_rate": 8.924626667844836e-07,
"loss": 0.0706,
"step": 187500
},
{
"epoch": 9.185518151170177,
"grad_norm": 0.006615887396037579,
"learning_rate": 8.664736549386919e-07,
"loss": 0.0718,
"step": 188000
},
{
"epoch": 9.20994772072116,
"grad_norm": 0.015520376153290272,
"learning_rate": 8.404846430929004e-07,
"loss": 0.0639,
"step": 188500
},
{
"epoch": 9.234377290272146,
"grad_norm": 0.18699295818805695,
"learning_rate": 8.144956312471088e-07,
"loss": 0.0671,
"step": 189000
},
{
"epoch": 9.25880685982313,
"grad_norm": 208.81222534179688,
"learning_rate": 7.885066194013171e-07,
"loss": 0.0577,
"step": 189500
},
{
"epoch": 9.283236429374114,
"grad_norm": 35.29654312133789,
"learning_rate": 7.625176075555256e-07,
"loss": 0.0754,
"step": 190000
},
{
"epoch": 9.3076659989251,
"grad_norm": 0.024813145399093628,
"learning_rate": 7.36528595709734e-07,
"loss": 0.073,
"step": 190500
},
{
"epoch": 9.332095568476083,
"grad_norm": 0.0019479466136544943,
"learning_rate": 7.105395838639424e-07,
"loss": 0.0621,
"step": 191000
},
{
"epoch": 9.356525138027068,
"grad_norm": 0.034291163086891174,
"learning_rate": 6.845505720181508e-07,
"loss": 0.0646,
"step": 191500
},
{
"epoch": 9.380954707578052,
"grad_norm": 21.998003005981445,
"learning_rate": 6.585615601723591e-07,
"loss": 0.058,
"step": 192000
},
{
"epoch": 9.405384277129038,
"grad_norm": 0.04920468479394913,
"learning_rate": 6.325725483265676e-07,
"loss": 0.0834,
"step": 192500
},
{
"epoch": 9.429813846680021,
"grad_norm": 0.0892450362443924,
"learning_rate": 6.065835364807759e-07,
"loss": 0.0666,
"step": 193000
},
{
"epoch": 9.454243416231005,
"grad_norm": 0.2844100892543793,
"learning_rate": 5.805945246349843e-07,
"loss": 0.0583,
"step": 193500
},
{
"epoch": 9.47867298578199,
"grad_norm": 0.26199427247047424,
"learning_rate": 5.546055127891928e-07,
"loss": 0.0719,
"step": 194000
},
{
"epoch": 9.503102555332974,
"grad_norm": 0.016245270147919655,
"learning_rate": 5.286165009434012e-07,
"loss": 0.072,
"step": 194500
},
{
"epoch": 9.52753212488396,
"grad_norm": 0.2348903864622116,
"learning_rate": 5.026274890976096e-07,
"loss": 0.0594,
"step": 195000
},
{
"epoch": 9.551961694434944,
"grad_norm": 0.00560354720801115,
"learning_rate": 4.7663847725181796e-07,
"loss": 0.0601,
"step": 195500
},
{
"epoch": 9.57639126398593,
"grad_norm": 0.20233391225337982,
"learning_rate": 4.5064946540602635e-07,
"loss": 0.0597,
"step": 196000
},
{
"epoch": 9.600820833536913,
"grad_norm": 0.028764836490154266,
"learning_rate": 4.246604535602348e-07,
"loss": 0.0748,
"step": 196500
},
{
"epoch": 9.625250403087897,
"grad_norm": 0.03489026054739952,
"learning_rate": 3.986714417144431e-07,
"loss": 0.0593,
"step": 197000
},
{
"epoch": 9.649679972638882,
"grad_norm": 0.17677338421344757,
"learning_rate": 3.7268242986865155e-07,
"loss": 0.0575,
"step": 197500
},
{
"epoch": 9.674109542189866,
"grad_norm": 0.006993240211158991,
"learning_rate": 3.4669341802286e-07,
"loss": 0.059,
"step": 198000
},
{
"epoch": 9.698539111740851,
"grad_norm": 0.5008031129837036,
"learning_rate": 3.207044061770684e-07,
"loss": 0.0728,
"step": 198500
},
{
"epoch": 9.722968681291835,
"grad_norm": 0.20838424563407898,
"learning_rate": 2.9471539433127676e-07,
"loss": 0.0706,
"step": 199000
},
{
"epoch": 9.74739825084282,
"grad_norm": 0.023256618529558182,
"learning_rate": 2.6872638248548514e-07,
"loss": 0.0704,
"step": 199500
},
{
"epoch": 9.771827820393804,
"grad_norm": 0.2845751643180847,
"learning_rate": 2.427373706396936e-07,
"loss": 0.0665,
"step": 200000
},
{
"epoch": 9.796257389944788,
"grad_norm": 0.04077767953276634,
"learning_rate": 2.1674835879390194e-07,
"loss": 0.0609,
"step": 200500
},
{
"epoch": 9.820686959495774,
"grad_norm": 0.01134885661303997,
"learning_rate": 1.9075934694811035e-07,
"loss": 0.0593,
"step": 201000
},
{
"epoch": 9.845116529046757,
"grad_norm": 0.018247269093990326,
"learning_rate": 1.6477033510231873e-07,
"loss": 0.0657,
"step": 201500
},
{
"epoch": 9.869546098597743,
"grad_norm": 0.008613000623881817,
"learning_rate": 1.3878132325652717e-07,
"loss": 0.0563,
"step": 202000
},
{
"epoch": 9.893975668148727,
"grad_norm": 0.2736217975616455,
"learning_rate": 1.1279231141073555e-07,
"loss": 0.0709,
"step": 202500
},
{
"epoch": 9.918405237699712,
"grad_norm": 0.02775813639163971,
"learning_rate": 8.680329956494395e-08,
"loss": 0.0677,
"step": 203000
},
{
"epoch": 9.942834807250696,
"grad_norm": 0.06576963514089584,
"learning_rate": 6.081428771915235e-08,
"loss": 0.0685,
"step": 203500
},
{
"epoch": 9.967264376801682,
"grad_norm": 0.1034393236041069,
"learning_rate": 3.4825275873360744e-08,
"loss": 0.0677,
"step": 204000
},
{
"epoch": 9.991693946352665,
"grad_norm": 0.14732515811920166,
"learning_rate": 8.836264027569144e-09,
"loss": 0.0649,
"step": 204500
},
{
"epoch": 10.0,
"eval_accuracy": 0.9122165727635014,
"eval_averaged_scores": 0.8973993679198728,
"eval_f1": 0.8825821630762444,
"eval_loss": 0.6054433584213257,
"eval_runtime": 20.1824,
"eval_samples_per_second": 1802.808,
"eval_steps_per_second": 112.722,
"step": 204670
},
{
"epoch": 10.0,
"step": 204670,
"total_flos": 1.0020319530941755e+17,
"train_loss": 0.16519966338430658,
"train_runtime": 10940.7303,
"train_samples_per_second": 299.305,
"train_steps_per_second": 18.707
}
],
"logging_steps": 500,
"max_steps": 204670,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0020319530941755e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}