{
"best_metric": 0.8667464541264547,
"best_model_checkpoint": "./nlu_finetuned_models/mnli/roberta-base_lr1e-05/checkpoint-110450",
"epoch": 10.0,
"eval_steps": 500,
"global_step": 220900,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.022634676324128564,
"grad_norm": 1.6427655220031738,
"learning_rate": 3.772446054021428e-07,
"loss": 1.0998,
"step": 500
},
{
"epoch": 0.04526935264825713,
"grad_norm": 3.6388282775878906,
"learning_rate": 7.544892108042856e-07,
"loss": 1.0989,
"step": 1000
},
{
"epoch": 0.06790402897238569,
"grad_norm": 5.199594974517822,
"learning_rate": 1.1317338162064282e-06,
"loss": 1.0951,
"step": 1500
},
{
"epoch": 0.09053870529651425,
"grad_norm": 16.297880172729492,
"learning_rate": 1.5089784216085712e-06,
"loss": 0.9832,
"step": 2000
},
{
"epoch": 0.11317338162064282,
"grad_norm": 30.214738845825195,
"learning_rate": 1.886223027010714e-06,
"loss": 0.7491,
"step": 2500
},
{
"epoch": 0.13580805794477138,
"grad_norm": 27.916301727294922,
"learning_rate": 2.2634676324128565e-06,
"loss": 0.6467,
"step": 3000
},
{
"epoch": 0.15844273426889996,
"grad_norm": 42.154232025146484,
"learning_rate": 2.6407122378149996e-06,
"loss": 0.6092,
"step": 3500
},
{
"epoch": 0.1810774105930285,
"grad_norm": 29.020992279052734,
"learning_rate": 3.0179568432171424e-06,
"loss": 0.5782,
"step": 4000
},
{
"epoch": 0.2037120869171571,
"grad_norm": 31.89137840270996,
"learning_rate": 3.395201448619285e-06,
"loss": 0.5567,
"step": 4500
},
{
"epoch": 0.22634676324128564,
"grad_norm": 46.158416748046875,
"learning_rate": 3.772446054021428e-06,
"loss": 0.5382,
"step": 5000
},
{
"epoch": 0.24898143956541421,
"grad_norm": 16.78737449645996,
"learning_rate": 4.149690659423571e-06,
"loss": 0.5278,
"step": 5500
},
{
"epoch": 0.27161611588954276,
"grad_norm": 13.616703033447266,
"learning_rate": 4.526935264825713e-06,
"loss": 0.5233,
"step": 6000
},
{
"epoch": 0.29425079221367134,
"grad_norm": 21.825986862182617,
"learning_rate": 4.904179870227856e-06,
"loss": 0.5042,
"step": 6500
},
{
"epoch": 0.3168854685377999,
"grad_norm": 24.68229103088379,
"learning_rate": 5.281424475629999e-06,
"loss": 0.4967,
"step": 7000
},
{
"epoch": 0.3395201448619285,
"grad_norm": 17.55514907836914,
"learning_rate": 5.658669081032142e-06,
"loss": 0.4979,
"step": 7500
},
{
"epoch": 0.362154821186057,
"grad_norm": 20.74059295654297,
"learning_rate": 6.035913686434285e-06,
"loss": 0.4983,
"step": 8000
},
{
"epoch": 0.3847894975101856,
"grad_norm": 12.430363655090332,
"learning_rate": 6.4131582918364275e-06,
"loss": 0.4773,
"step": 8500
},
{
"epoch": 0.4074241738343142,
"grad_norm": 30.890859603881836,
"learning_rate": 6.79040289723857e-06,
"loss": 0.4802,
"step": 9000
},
{
"epoch": 0.43005885015844275,
"grad_norm": 27.11601448059082,
"learning_rate": 7.167647502640713e-06,
"loss": 0.4631,
"step": 9500
},
{
"epoch": 0.4526935264825713,
"grad_norm": 22.201452255249023,
"learning_rate": 7.544892108042856e-06,
"loss": 0.4575,
"step": 10000
},
{
"epoch": 0.47532820280669985,
"grad_norm": 22.29547119140625,
"learning_rate": 7.922136713445e-06,
"loss": 0.4643,
"step": 10500
},
{
"epoch": 0.49796287913082843,
"grad_norm": 15.072667121887207,
"learning_rate": 8.299381318847142e-06,
"loss": 0.466,
"step": 11000
},
{
"epoch": 0.520597555454957,
"grad_norm": 28.1677188873291,
"learning_rate": 8.676625924249283e-06,
"loss": 0.4619,
"step": 11500
},
{
"epoch": 0.5432322317790855,
"grad_norm": 17.43030548095703,
"learning_rate": 9.053870529651426e-06,
"loss": 0.4542,
"step": 12000
},
{
"epoch": 0.5658669081032142,
"grad_norm": 23.5450382232666,
"learning_rate": 9.431115135053569e-06,
"loss": 0.449,
"step": 12500
},
{
"epoch": 0.5885015844273427,
"grad_norm": 21.659141540527344,
"learning_rate": 9.808359740455711e-06,
"loss": 0.4434,
"step": 13000
},
{
"epoch": 0.6111362607514712,
"grad_norm": 19.54996681213379,
"learning_rate": 9.98815291409418e-06,
"loss": 0.4555,
"step": 13500
},
{
"epoch": 0.6337709370755998,
"grad_norm": 11.85847282409668,
"learning_rate": 9.964073471196171e-06,
"loss": 0.4511,
"step": 14000
},
{
"epoch": 0.6564056133997284,
"grad_norm": 26.537872314453125,
"learning_rate": 9.939994028298163e-06,
"loss": 0.4424,
"step": 14500
},
{
"epoch": 0.679040289723857,
"grad_norm": 16.38085174560547,
"learning_rate": 9.915914585400153e-06,
"loss": 0.4488,
"step": 15000
},
{
"epoch": 0.7016749660479855,
"grad_norm": 20.928190231323242,
"learning_rate": 9.891835142502145e-06,
"loss": 0.4368,
"step": 15500
},
{
"epoch": 0.724309642372114,
"grad_norm": 16.260562896728516,
"learning_rate": 9.867755699604135e-06,
"loss": 0.4481,
"step": 16000
},
{
"epoch": 0.7469443186962427,
"grad_norm": 9.784706115722656,
"learning_rate": 9.843676256706126e-06,
"loss": 0.4344,
"step": 16500
},
{
"epoch": 0.7695789950203712,
"grad_norm": 6.384799480438232,
"learning_rate": 9.819596813808116e-06,
"loss": 0.4416,
"step": 17000
},
{
"epoch": 0.7922136713444998,
"grad_norm": 14.344380378723145,
"learning_rate": 9.795517370910108e-06,
"loss": 0.429,
"step": 17500
},
{
"epoch": 0.8148483476686283,
"grad_norm": 33.43516159057617,
"learning_rate": 9.771437928012098e-06,
"loss": 0.4271,
"step": 18000
},
{
"epoch": 0.8374830239927569,
"grad_norm": 23.591716766357422,
"learning_rate": 9.74735848511409e-06,
"loss": 0.44,
"step": 18500
},
{
"epoch": 0.8601177003168855,
"grad_norm": 20.75270652770996,
"learning_rate": 9.723279042216081e-06,
"loss": 0.423,
"step": 19000
},
{
"epoch": 0.882752376641014,
"grad_norm": 24.833736419677734,
"learning_rate": 9.699199599318071e-06,
"loss": 0.4212,
"step": 19500
},
{
"epoch": 0.9053870529651425,
"grad_norm": 18.03992462158203,
"learning_rate": 9.675120156420061e-06,
"loss": 0.4164,
"step": 20000
},
{
"epoch": 0.9280217292892712,
"grad_norm": 12.502860069274902,
"learning_rate": 9.651040713522053e-06,
"loss": 0.4213,
"step": 20500
},
{
"epoch": 0.9506564056133997,
"grad_norm": 13.808119773864746,
"learning_rate": 9.626961270624043e-06,
"loss": 0.4197,
"step": 21000
},
{
"epoch": 0.9732910819375283,
"grad_norm": 15.191283226013184,
"learning_rate": 9.602881827726035e-06,
"loss": 0.4219,
"step": 21500
},
{
"epoch": 0.9959257582616569,
"grad_norm": 27.065793991088867,
"learning_rate": 9.578802384828026e-06,
"loss": 0.3965,
"step": 22000
},
{
"epoch": 1.0,
"eval_accuracy": 0.8536579155101729,
"eval_loss": 0.4072297513484955,
"eval_runtime": 25.9705,
"eval_samples_per_second": 1512.137,
"eval_steps_per_second": 94.53,
"step": 22090
},
{
"epoch": 1.0185604345857855,
"grad_norm": 40.00636672973633,
"learning_rate": 9.554722941930016e-06,
"loss": 0.371,
"step": 22500
},
{
"epoch": 1.041195110909914,
"grad_norm": 14.272022247314453,
"learning_rate": 9.530643499032008e-06,
"loss": 0.357,
"step": 23000
},
{
"epoch": 1.0638297872340425,
"grad_norm": 14.10964298248291,
"learning_rate": 9.506564056133998e-06,
"loss": 0.3584,
"step": 23500
},
{
"epoch": 1.086464463558171,
"grad_norm": 49.96983337402344,
"learning_rate": 9.48248461323599e-06,
"loss": 0.3567,
"step": 24000
},
{
"epoch": 1.1090991398822996,
"grad_norm": 15.82590389251709,
"learning_rate": 9.45840517033798e-06,
"loss": 0.3682,
"step": 24500
},
{
"epoch": 1.1317338162064283,
"grad_norm": 17.939584732055664,
"learning_rate": 9.43432572743997e-06,
"loss": 0.3589,
"step": 25000
},
{
"epoch": 1.1543684925305568,
"grad_norm": 9.916324615478516,
"learning_rate": 9.410246284541961e-06,
"loss": 0.3654,
"step": 25500
},
{
"epoch": 1.1770031688546854,
"grad_norm": 13.99868392944336,
"learning_rate": 9.386166841643953e-06,
"loss": 0.3609,
"step": 26000
},
{
"epoch": 1.1996378451788139,
"grad_norm": 19.407920837402344,
"learning_rate": 9.362087398745945e-06,
"loss": 0.3615,
"step": 26500
},
{
"epoch": 1.2222725215029424,
"grad_norm": 22.240596771240234,
"learning_rate": 9.338007955847935e-06,
"loss": 0.3628,
"step": 27000
},
{
"epoch": 1.2449071978270712,
"grad_norm": 27.17753028869629,
"learning_rate": 9.313928512949925e-06,
"loss": 0.3629,
"step": 27500
},
{
"epoch": 1.2675418741511997,
"grad_norm": 16.045578002929688,
"learning_rate": 9.289849070051916e-06,
"loss": 0.3735,
"step": 28000
},
{
"epoch": 1.2901765504753282,
"grad_norm": 16.507614135742188,
"learning_rate": 9.265769627153906e-06,
"loss": 0.3646,
"step": 28500
},
{
"epoch": 1.3128112267994567,
"grad_norm": 16.56389808654785,
"learning_rate": 9.241690184255898e-06,
"loss": 0.3459,
"step": 29000
},
{
"epoch": 1.3354459031235852,
"grad_norm": 21.800291061401367,
"learning_rate": 9.217610741357888e-06,
"loss": 0.3552,
"step": 29500
},
{
"epoch": 1.358080579447714,
"grad_norm": 19.036035537719727,
"learning_rate": 9.19353129845988e-06,
"loss": 0.3569,
"step": 30000
},
{
"epoch": 1.3807152557718425,
"grad_norm": 20.2823543548584,
"learning_rate": 9.169451855561871e-06,
"loss": 0.3635,
"step": 30500
},
{
"epoch": 1.403349932095971,
"grad_norm": 9.948273658752441,
"learning_rate": 9.145372412663861e-06,
"loss": 0.3626,
"step": 31000
},
{
"epoch": 1.4259846084200996,
"grad_norm": 24.87046241760254,
"learning_rate": 9.121292969765853e-06,
"loss": 0.3625,
"step": 31500
},
{
"epoch": 1.448619284744228,
"grad_norm": 7.488998889923096,
"learning_rate": 9.097213526867843e-06,
"loss": 0.3598,
"step": 32000
},
{
"epoch": 1.4712539610683568,
"grad_norm": 22.464574813842773,
"learning_rate": 9.073134083969835e-06,
"loss": 0.354,
"step": 32500
},
{
"epoch": 1.4938886373924853,
"grad_norm": 18.735248565673828,
"learning_rate": 9.049054641071825e-06,
"loss": 0.3559,
"step": 33000
},
{
"epoch": 1.5165233137166139,
"grad_norm": 17.83134651184082,
"learning_rate": 9.024975198173815e-06,
"loss": 0.3458,
"step": 33500
},
{
"epoch": 1.5391579900407424,
"grad_norm": 22.070232391357422,
"learning_rate": 9.000895755275806e-06,
"loss": 0.3665,
"step": 34000
},
{
"epoch": 1.561792666364871,
"grad_norm": 6.331955432891846,
"learning_rate": 8.976816312377798e-06,
"loss": 0.3607,
"step": 34500
},
{
"epoch": 1.5844273426889997,
"grad_norm": 9.2369966506958,
"learning_rate": 8.95273686947979e-06,
"loss": 0.3549,
"step": 35000
},
{
"epoch": 1.607062019013128,
"grad_norm": 14.87072467803955,
"learning_rate": 8.92865742658178e-06,
"loss": 0.3577,
"step": 35500
},
{
"epoch": 1.6296966953372567,
"grad_norm": 21.532787322998047,
"learning_rate": 8.90457798368377e-06,
"loss": 0.353,
"step": 36000
},
{
"epoch": 1.6523313716613852,
"grad_norm": 21.97151756286621,
"learning_rate": 8.880498540785761e-06,
"loss": 0.3706,
"step": 36500
},
{
"epoch": 1.6749660479855137,
"grad_norm": 17.71976661682129,
"learning_rate": 8.856419097887751e-06,
"loss": 0.3648,
"step": 37000
},
{
"epoch": 1.6976007243096425,
"grad_norm": 21.98705291748047,
"learning_rate": 8.832339654989743e-06,
"loss": 0.3591,
"step": 37500
},
{
"epoch": 1.7202354006337708,
"grad_norm": 26.871360778808594,
"learning_rate": 8.808260212091733e-06,
"loss": 0.3454,
"step": 38000
},
{
"epoch": 1.7428700769578995,
"grad_norm": 12.331396102905273,
"learning_rate": 8.784180769193725e-06,
"loss": 0.3418,
"step": 38500
},
{
"epoch": 1.765504753282028,
"grad_norm": 7.1670756340026855,
"learning_rate": 8.760101326295716e-06,
"loss": 0.3454,
"step": 39000
},
{
"epoch": 1.7881394296061566,
"grad_norm": 18.57856559753418,
"learning_rate": 8.736021883397706e-06,
"loss": 0.345,
"step": 39500
},
{
"epoch": 1.8107741059302853,
"grad_norm": 36.80974578857422,
"learning_rate": 8.711942440499698e-06,
"loss": 0.3475,
"step": 40000
},
{
"epoch": 1.8334087822544136,
"grad_norm": 19.742300033569336,
"learning_rate": 8.687862997601688e-06,
"loss": 0.3499,
"step": 40500
},
{
"epoch": 1.8560434585785424,
"grad_norm": 13.53554630279541,
"learning_rate": 8.663783554703678e-06,
"loss": 0.3625,
"step": 41000
},
{
"epoch": 1.8786781349026709,
"grad_norm": 12.660998344421387,
"learning_rate": 8.63970411180567e-06,
"loss": 0.355,
"step": 41500
},
{
"epoch": 1.9013128112267994,
"grad_norm": 10.94740104675293,
"learning_rate": 8.615624668907661e-06,
"loss": 0.3495,
"step": 42000
},
{
"epoch": 1.9239474875509281,
"grad_norm": 39.87784957885742,
"learning_rate": 8.591545226009653e-06,
"loss": 0.3484,
"step": 42500
},
{
"epoch": 1.9465821638750564,
"grad_norm": 15.916511535644531,
"learning_rate": 8.567465783111643e-06,
"loss": 0.3511,
"step": 43000
},
{
"epoch": 1.9692168401991852,
"grad_norm": 19.070566177368164,
"learning_rate": 8.543386340213633e-06,
"loss": 0.334,
"step": 43500
},
{
"epoch": 1.9918515165233137,
"grad_norm": 21.015113830566406,
"learning_rate": 8.519306897315625e-06,
"loss": 0.3463,
"step": 44000
},
{
"epoch": 2.0,
"eval_accuracy": 0.8632324106847292,
"eval_loss": 0.37872129678726196,
"eval_runtime": 26.0123,
"eval_samples_per_second": 1509.711,
"eval_steps_per_second": 94.379,
"step": 44180
},
{
"epoch": 2.0144861928474422,
"grad_norm": 12.006568908691406,
"learning_rate": 8.495227454417615e-06,
"loss": 0.2925,
"step": 44500
},
{
"epoch": 2.037120869171571,
"grad_norm": 16.82537078857422,
"learning_rate": 8.471148011519606e-06,
"loss": 0.2821,
"step": 45000
},
{
"epoch": 2.0597555454956993,
"grad_norm": 14.019499778747559,
"learning_rate": 8.447068568621596e-06,
"loss": 0.2759,
"step": 45500
},
{
"epoch": 2.082390221819828,
"grad_norm": 11.033167839050293,
"learning_rate": 8.422989125723588e-06,
"loss": 0.2778,
"step": 46000
},
{
"epoch": 2.1050248981439568,
"grad_norm": 55.102169036865234,
"learning_rate": 8.39890968282558e-06,
"loss": 0.2759,
"step": 46500
},
{
"epoch": 2.127659574468085,
"grad_norm": 22.346426010131836,
"learning_rate": 8.37483023992757e-06,
"loss": 0.2793,
"step": 47000
},
{
"epoch": 2.150294250792214,
"grad_norm": 9.00412368774414,
"learning_rate": 8.350750797029561e-06,
"loss": 0.2696,
"step": 47500
},
{
"epoch": 2.172928927116342,
"grad_norm": 13.098092079162598,
"learning_rate": 8.326671354131551e-06,
"loss": 0.2882,
"step": 48000
},
{
"epoch": 2.195563603440471,
"grad_norm": 16.290449142456055,
"learning_rate": 8.302591911233543e-06,
"loss": 0.2807,
"step": 48500
},
{
"epoch": 2.218198279764599,
"grad_norm": 36.5540771484375,
"learning_rate": 8.278512468335533e-06,
"loss": 0.2761,
"step": 49000
},
{
"epoch": 2.240832956088728,
"grad_norm": 6.7274065017700195,
"learning_rate": 8.254433025437523e-06,
"loss": 0.2727,
"step": 49500
},
{
"epoch": 2.2634676324128566,
"grad_norm": 10.446264266967773,
"learning_rate": 8.230353582539515e-06,
"loss": 0.2798,
"step": 50000
},
{
"epoch": 2.286102308736985,
"grad_norm": 14.677602767944336,
"learning_rate": 8.206274139641506e-06,
"loss": 0.2677,
"step": 50500
},
{
"epoch": 2.3087369850611137,
"grad_norm": 23.758255004882812,
"learning_rate": 8.182194696743498e-06,
"loss": 0.2917,
"step": 51000
},
{
"epoch": 2.3313716613852424,
"grad_norm": 15.98766803741455,
"learning_rate": 8.158115253845488e-06,
"loss": 0.2918,
"step": 51500
},
{
"epoch": 2.3540063377093707,
"grad_norm": 2.399088144302368,
"learning_rate": 8.134035810947478e-06,
"loss": 0.28,
"step": 52000
},
{
"epoch": 2.3766410140334995,
"grad_norm": 15.759695053100586,
"learning_rate": 8.10995636804947e-06,
"loss": 0.2775,
"step": 52500
},
{
"epoch": 2.3992756903576278,
"grad_norm": 46.171875,
"learning_rate": 8.08587692515146e-06,
"loss": 0.2799,
"step": 53000
},
{
"epoch": 2.4219103666817565,
"grad_norm": 41.83917236328125,
"learning_rate": 8.061797482253451e-06,
"loss": 0.2905,
"step": 53500
},
{
"epoch": 2.444545043005885,
"grad_norm": 28.17142677307129,
"learning_rate": 8.037718039355441e-06,
"loss": 0.2897,
"step": 54000
},
{
"epoch": 2.4671797193300136,
"grad_norm": 30.47774314880371,
"learning_rate": 8.013638596457433e-06,
"loss": 0.2687,
"step": 54500
},
{
"epoch": 2.4898143956541423,
"grad_norm": 24.639873504638672,
"learning_rate": 7.989559153559425e-06,
"loss": 0.2965,
"step": 55000
},
{
"epoch": 2.5124490719782706,
"grad_norm": 24.947662353515625,
"learning_rate": 7.965479710661415e-06,
"loss": 0.2893,
"step": 55500
},
{
"epoch": 2.5350837483023994,
"grad_norm": 28.483293533325195,
"learning_rate": 7.941400267763406e-06,
"loss": 0.2757,
"step": 56000
},
{
"epoch": 2.557718424626528,
"grad_norm": 45.0990104675293,
"learning_rate": 7.917320824865396e-06,
"loss": 0.2682,
"step": 56500
},
{
"epoch": 2.5803531009506564,
"grad_norm": 22.60608673095703,
"learning_rate": 7.893241381967386e-06,
"loss": 0.2779,
"step": 57000
},
{
"epoch": 2.6029877772747847,
"grad_norm": 31.8905029296875,
"learning_rate": 7.869161939069378e-06,
"loss": 0.2857,
"step": 57500
},
{
"epoch": 2.6256224535989134,
"grad_norm": 10.05256175994873,
"learning_rate": 7.84508249617137e-06,
"loss": 0.2765,
"step": 58000
},
{
"epoch": 2.648257129923042,
"grad_norm": 18.466182708740234,
"learning_rate": 7.82100305327336e-06,
"loss": 0.2743,
"step": 58500
},
{
"epoch": 2.6708918062471705,
"grad_norm": 22.704708099365234,
"learning_rate": 7.796923610375351e-06,
"loss": 0.2718,
"step": 59000
},
{
"epoch": 2.6935264825712992,
"grad_norm": 38.51487731933594,
"learning_rate": 7.772844167477341e-06,
"loss": 0.2733,
"step": 59500
},
{
"epoch": 2.716161158895428,
"grad_norm": 25.67682647705078,
"learning_rate": 7.748764724579333e-06,
"loss": 0.2803,
"step": 60000
},
{
"epoch": 2.7387958352195563,
"grad_norm": 39.251068115234375,
"learning_rate": 7.724685281681323e-06,
"loss": 0.2757,
"step": 60500
},
{
"epoch": 2.761430511543685,
"grad_norm": 20.692581176757812,
"learning_rate": 7.700605838783315e-06,
"loss": 0.2853,
"step": 61000
},
{
"epoch": 2.7840651878678138,
"grad_norm": 22.915571212768555,
"learning_rate": 7.676526395885305e-06,
"loss": 0.2899,
"step": 61500
},
{
"epoch": 2.806699864191942,
"grad_norm": 20.167299270629883,
"learning_rate": 7.652446952987296e-06,
"loss": 0.2802,
"step": 62000
},
{
"epoch": 2.8293345405160704,
"grad_norm": 31.358797073364258,
"learning_rate": 7.628367510089287e-06,
"loss": 0.2775,
"step": 62500
},
{
"epoch": 2.851969216840199,
"grad_norm": 16.160572052001953,
"learning_rate": 7.604288067191278e-06,
"loss": 0.2755,
"step": 63000
},
{
"epoch": 2.874603893164328,
"grad_norm": 24.560882568359375,
"learning_rate": 7.58020862429327e-06,
"loss": 0.3024,
"step": 63500
},
{
"epoch": 2.897238569488456,
"grad_norm": 30.818029403686523,
"learning_rate": 7.55612918139526e-06,
"loss": 0.2878,
"step": 64000
},
{
"epoch": 2.919873245812585,
"grad_norm": 31.68956756591797,
"learning_rate": 7.532049738497251e-06,
"loss": 0.2793,
"step": 64500
},
{
"epoch": 2.9425079221367136,
"grad_norm": 17.775924682617188,
"learning_rate": 7.507970295599241e-06,
"loss": 0.2824,
"step": 65000
},
{
"epoch": 2.965142598460842,
"grad_norm": 26.35023307800293,
"learning_rate": 7.483890852701232e-06,
"loss": 0.275,
"step": 65500
},
{
"epoch": 2.9877772747849707,
"grad_norm": 13.411957740783691,
"learning_rate": 7.459811409803224e-06,
"loss": 0.2867,
"step": 66000
},
{
"epoch": 3.0,
"eval_accuracy": 0.8651422169030583,
"eval_loss": 0.39841848611831665,
"eval_runtime": 26.0093,
"eval_samples_per_second": 1509.882,
"eval_steps_per_second": 94.389,
"step": 66270
},
{
"epoch": 3.010411951109099,
"grad_norm": 31.959758758544922,
"learning_rate": 7.435731966905214e-06,
"loss": 0.2628,
"step": 66500
},
{
"epoch": 3.0330466274332277,
"grad_norm": 9.174257278442383,
"learning_rate": 7.4116525240072056e-06,
"loss": 0.2354,
"step": 67000
},
{
"epoch": 3.0556813037573565,
"grad_norm": 41.27067565917969,
"learning_rate": 7.387573081109196e-06,
"loss": 0.231,
"step": 67500
},
{
"epoch": 3.0783159800814848,
"grad_norm": 37.20170211791992,
"learning_rate": 7.363493638211186e-06,
"loss": 0.2168,
"step": 68000
},
{
"epoch": 3.1009506564056135,
"grad_norm": 37.324825286865234,
"learning_rate": 7.339414195313178e-06,
"loss": 0.2244,
"step": 68500
},
{
"epoch": 3.123585332729742,
"grad_norm": 39.479610443115234,
"learning_rate": 7.315334752415169e-06,
"loss": 0.2199,
"step": 69000
},
{
"epoch": 3.1462200090538706,
"grad_norm": 38.33029556274414,
"learning_rate": 7.29125530951716e-06,
"loss": 0.2285,
"step": 69500
},
{
"epoch": 3.1688546853779993,
"grad_norm": 34.06528091430664,
"learning_rate": 7.2671758666191506e-06,
"loss": 0.224,
"step": 70000
},
{
"epoch": 3.1914893617021276,
"grad_norm": 36.66078186035156,
"learning_rate": 7.2430964237211406e-06,
"loss": 0.2224,
"step": 70500
},
{
"epoch": 3.2141240380262563,
"grad_norm": 13.858600616455078,
"learning_rate": 7.219016980823132e-06,
"loss": 0.2163,
"step": 71000
},
{
"epoch": 3.2367587143503846,
"grad_norm": 2.7164244651794434,
"learning_rate": 7.194937537925123e-06,
"loss": 0.2356,
"step": 71500
},
{
"epoch": 3.2593933906745134,
"grad_norm": 57.87660598754883,
"learning_rate": 7.170858095027115e-06,
"loss": 0.2271,
"step": 72000
},
{
"epoch": 3.2820280669986417,
"grad_norm": 90.21813201904297,
"learning_rate": 7.146778652129105e-06,
"loss": 0.2177,
"step": 72500
},
{
"epoch": 3.3046627433227704,
"grad_norm": 14.034249305725098,
"learning_rate": 7.1226992092310956e-06,
"loss": 0.2225,
"step": 73000
},
{
"epoch": 3.327297419646899,
"grad_norm": 46.05585861206055,
"learning_rate": 7.098619766333087e-06,
"loss": 0.2242,
"step": 73500
},
{
"epoch": 3.3499320959710275,
"grad_norm": 37.766517639160156,
"learning_rate": 7.074540323435077e-06,
"loss": 0.2384,
"step": 74000
},
{
"epoch": 3.3725667722951562,
"grad_norm": 9.106913566589355,
"learning_rate": 7.050460880537069e-06,
"loss": 0.2485,
"step": 74500
},
{
"epoch": 3.395201448619285,
"grad_norm": 15.67898178100586,
"learning_rate": 7.026381437639059e-06,
"loss": 0.2373,
"step": 75000
},
{
"epoch": 3.4178361249434133,
"grad_norm": 26.127885818481445,
"learning_rate": 7.00230199474105e-06,
"loss": 0.2333,
"step": 75500
},
{
"epoch": 3.440470801267542,
"grad_norm": 14.250904083251953,
"learning_rate": 6.9782225518430414e-06,
"loss": 0.2189,
"step": 76000
},
{
"epoch": 3.4631054775916703,
"grad_norm": 67.55126190185547,
"learning_rate": 6.954143108945031e-06,
"loss": 0.2378,
"step": 76500
},
{
"epoch": 3.485740153915799,
"grad_norm": 12.584871292114258,
"learning_rate": 6.930063666047023e-06,
"loss": 0.2448,
"step": 77000
},
{
"epoch": 3.5083748302399274,
"grad_norm": 26.13035011291504,
"learning_rate": 6.905984223149014e-06,
"loss": 0.2302,
"step": 77500
},
{
"epoch": 3.531009506564056,
"grad_norm": 19.708215713500977,
"learning_rate": 6.881904780251004e-06,
"loss": 0.2239,
"step": 78000
},
{
"epoch": 3.553644182888185,
"grad_norm": 9.146390914916992,
"learning_rate": 6.857825337352996e-06,
"loss": 0.2303,
"step": 78500
},
{
"epoch": 3.576278859212313,
"grad_norm": 69.38152313232422,
"learning_rate": 6.8337458944549864e-06,
"loss": 0.2366,
"step": 79000
},
{
"epoch": 3.598913535536442,
"grad_norm": 43.6939811706543,
"learning_rate": 6.809666451556978e-06,
"loss": 0.2263,
"step": 79500
},
{
"epoch": 3.6215482118605706,
"grad_norm": 12.009560585021973,
"learning_rate": 6.785587008658968e-06,
"loss": 0.2251,
"step": 80000
},
{
"epoch": 3.644182888184699,
"grad_norm": 21.58733558654785,
"learning_rate": 6.761507565760959e-06,
"loss": 0.2447,
"step": 80500
},
{
"epoch": 3.6668175645088277,
"grad_norm": 15.762284278869629,
"learning_rate": 6.73742812286295e-06,
"loss": 0.2349,
"step": 81000
},
{
"epoch": 3.689452240832956,
"grad_norm": 32.47639083862305,
"learning_rate": 6.713348679964941e-06,
"loss": 0.2413,
"step": 81500
},
{
"epoch": 3.7120869171570847,
"grad_norm": 36.506526947021484,
"learning_rate": 6.689269237066932e-06,
"loss": 0.2311,
"step": 82000
},
{
"epoch": 3.734721593481213,
"grad_norm": 30.49101448059082,
"learning_rate": 6.665189794168922e-06,
"loss": 0.2399,
"step": 82500
},
{
"epoch": 3.7573562698053418,
"grad_norm": 10.327536582946777,
"learning_rate": 6.641110351270914e-06,
"loss": 0.2332,
"step": 83000
},
{
"epoch": 3.7799909461294705,
"grad_norm": 41.280303955078125,
"learning_rate": 6.617030908372905e-06,
"loss": 0.2274,
"step": 83500
},
{
"epoch": 3.802625622453599,
"grad_norm": 35.73218536376953,
"learning_rate": 6.592951465474895e-06,
"loss": 0.2363,
"step": 84000
},
{
"epoch": 3.8252602987777276,
"grad_norm": 19.240692138671875,
"learning_rate": 6.5688720225768865e-06,
"loss": 0.2408,
"step": 84500
},
{
"epoch": 3.8478949751018563,
"grad_norm": 55.575645446777344,
"learning_rate": 6.544792579678877e-06,
"loss": 0.2281,
"step": 85000
},
{
"epoch": 3.8705296514259846,
"grad_norm": 5.29152250289917,
"learning_rate": 6.520713136780868e-06,
"loss": 0.2316,
"step": 85500
},
{
"epoch": 3.893164327750113,
"grad_norm": 74.08470153808594,
"learning_rate": 6.496633693882859e-06,
"loss": 0.2361,
"step": 86000
},
{
"epoch": 3.9157990040742416,
"grad_norm": 23.3903751373291,
"learning_rate": 6.472554250984849e-06,
"loss": 0.2425,
"step": 86500
},
{
"epoch": 3.9384336803983704,
"grad_norm": 37.02583694458008,
"learning_rate": 6.448474808086841e-06,
"loss": 0.2377,
"step": 87000
},
{
"epoch": 3.9610683567224987,
"grad_norm": 13.134513854980469,
"learning_rate": 6.4243953651888315e-06,
"loss": 0.2424,
"step": 87500
},
{
"epoch": 3.9837030330466274,
"grad_norm": 10.808335304260254,
"learning_rate": 6.400315922290823e-06,
"loss": 0.2339,
"step": 88000
},
{
"epoch": 4.0,
"eval_accuracy": 0.8661353161365893,
"eval_loss": 0.4954204857349396,
"eval_runtime": 26.0621,
"eval_samples_per_second": 1506.824,
"eval_steps_per_second": 94.198,
"step": 88360
},
{
"epoch": 4.006337709370756,
"grad_norm": 9.569930076599121,
"learning_rate": 6.376236479392813e-06,
"loss": 0.2142,
"step": 88500
},
{
"epoch": 4.0289723856948845,
"grad_norm": 25.4268856048584,
"learning_rate": 6.352157036494804e-06,
"loss": 0.1827,
"step": 89000
},
{
"epoch": 4.051607062019013,
"grad_norm": 60.39373779296875,
"learning_rate": 6.328077593596796e-06,
"loss": 0.197,
"step": 89500
},
{
"epoch": 4.074241738343142,
"grad_norm": 63.3898811340332,
"learning_rate": 6.303998150698786e-06,
"loss": 0.196,
"step": 90000
},
{
"epoch": 4.09687641466727,
"grad_norm": 61.60245132446289,
"learning_rate": 6.279918707800777e-06,
"loss": 0.1904,
"step": 90500
},
{
"epoch": 4.119511090991399,
"grad_norm": 12.662140846252441,
"learning_rate": 6.255839264902767e-06,
"loss": 0.1933,
"step": 91000
},
{
"epoch": 4.142145767315528,
"grad_norm": 15.43615436553955,
"learning_rate": 6.231759822004758e-06,
"loss": 0.1922,
"step": 91500
},
{
"epoch": 4.164780443639656,
"grad_norm": 64.14022064208984,
"learning_rate": 6.20768037910675e-06,
"loss": 0.1969,
"step": 92000
},
{
"epoch": 4.187415119963784,
"grad_norm": 46.96083068847656,
"learning_rate": 6.18360093620874e-06,
"loss": 0.2086,
"step": 92500
},
{
"epoch": 4.2100497962879135,
"grad_norm": 28.856672286987305,
"learning_rate": 6.1595214933107315e-06,
"loss": 0.2022,
"step": 93000
},
{
"epoch": 4.232684472612042,
"grad_norm": 0.08567750453948975,
"learning_rate": 6.135442050412722e-06,
"loss": 0.1966,
"step": 93500
},
{
"epoch": 4.25531914893617,
"grad_norm": 23.097795486450195,
"learning_rate": 6.111362607514712e-06,
"loss": 0.1994,
"step": 94000
},
{
"epoch": 4.277953825260298,
"grad_norm": 124.21513366699219,
"learning_rate": 6.087283164616704e-06,
"loss": 0.2014,
"step": 94500
},
{
"epoch": 4.300588501584428,
"grad_norm": 106.59521484375,
"learning_rate": 6.063203721718695e-06,
"loss": 0.1999,
"step": 95000
},
{
"epoch": 4.323223177908556,
"grad_norm": 9.190028190612793,
"learning_rate": 6.039124278820686e-06,
"loss": 0.1989,
"step": 95500
},
{
"epoch": 4.345857854232684,
"grad_norm": 0.6034038066864014,
"learning_rate": 6.0150448359226765e-06,
"loss": 0.2003,
"step": 96000
},
{
"epoch": 4.368492530556813,
"grad_norm": 28.348718643188477,
"learning_rate": 5.990965393024667e-06,
"loss": 0.1944,
"step": 96500
},
{
"epoch": 4.391127206880942,
"grad_norm": 34.211814880371094,
"learning_rate": 5.966885950126658e-06,
"loss": 0.214,
"step": 97000
},
{
"epoch": 4.41376188320507,
"grad_norm": 78.27364349365234,
"learning_rate": 5.942806507228649e-06,
"loss": 0.1948,
"step": 97500
},
{
"epoch": 4.436396559529198,
"grad_norm": 39.985931396484375,
"learning_rate": 5.918727064330641e-06,
"loss": 0.1979,
"step": 98000
},
{
"epoch": 4.4590312358533275,
"grad_norm": 125.82061004638672,
"learning_rate": 5.894647621432631e-06,
"loss": 0.2057,
"step": 98500
},
{
"epoch": 4.481665912177456,
"grad_norm": 24.297237396240234,
"learning_rate": 5.8705681785346215e-06,
"loss": 0.1873,
"step": 99000
},
{
"epoch": 4.504300588501584,
"grad_norm": 14.736886024475098,
"learning_rate": 5.846488735636613e-06,
"loss": 0.2043,
"step": 99500
},
{
"epoch": 4.526935264825713,
"grad_norm": 47.609375,
"learning_rate": 5.822409292738603e-06,
"loss": 0.2043,
"step": 100000
},
{
"epoch": 4.549569941149842,
"grad_norm": 32.71791076660156,
"learning_rate": 5.798329849840595e-06,
"loss": 0.1981,
"step": 100500
},
{
"epoch": 4.57220461747397,
"grad_norm": 32.31149673461914,
"learning_rate": 5.774250406942586e-06,
"loss": 0.1904,
"step": 101000
},
{
"epoch": 4.594839293798099,
"grad_norm": 15.635351181030273,
"learning_rate": 5.7501709640445765e-06,
"loss": 0.2027,
"step": 101500
},
{
"epoch": 4.617473970122227,
"grad_norm": 0.519290566444397,
"learning_rate": 5.726091521146567e-06,
"loss": 0.2102,
"step": 102000
},
{
"epoch": 4.640108646446356,
"grad_norm": 54.78620910644531,
"learning_rate": 5.702012078248557e-06,
"loss": 0.2028,
"step": 102500
},
{
"epoch": 4.662743322770485,
"grad_norm": 15.834295272827148,
"learning_rate": 5.677932635350549e-06,
"loss": 0.2068,
"step": 103000
},
{
"epoch": 4.685377999094613,
"grad_norm": 18.294235229492188,
"learning_rate": 5.65385319245254e-06,
"loss": 0.2194,
"step": 103500
},
{
"epoch": 4.7080126754187415,
"grad_norm": 9.605391502380371,
"learning_rate": 5.6297737495545315e-06,
"loss": 0.1946,
"step": 104000
},
{
"epoch": 4.73064735174287,
"grad_norm": 50.544219970703125,
"learning_rate": 5.6056943066565215e-06,
"loss": 0.2007,
"step": 104500
},
{
"epoch": 4.753282028066999,
"grad_norm": 19.007843017578125,
"learning_rate": 5.581614863758512e-06,
"loss": 0.2192,
"step": 105000
},
{
"epoch": 4.775916704391127,
"grad_norm": 1.8254756927490234,
"learning_rate": 5.557535420860504e-06,
"loss": 0.1972,
"step": 105500
},
{
"epoch": 4.7985513807152556,
"grad_norm": 32.10722732543945,
"learning_rate": 5.533455977962494e-06,
"loss": 0.2088,
"step": 106000
},
{
"epoch": 4.821186057039384,
"grad_norm": 54.646392822265625,
"learning_rate": 5.509376535064486e-06,
"loss": 0.2111,
"step": 106500
},
{
"epoch": 4.843820733363513,
"grad_norm": 0.40658873319625854,
"learning_rate": 5.485297092166476e-06,
"loss": 0.2114,
"step": 107000
},
{
"epoch": 4.866455409687641,
"grad_norm": 12.083222389221191,
"learning_rate": 5.4612176492684665e-06,
"loss": 0.1959,
"step": 107500
},
{
"epoch": 4.88909008601177,
"grad_norm": 0.27834174036979675,
"learning_rate": 5.437138206370458e-06,
"loss": 0.1994,
"step": 108000
},
{
"epoch": 4.911724762335899,
"grad_norm": 7.066097259521484,
"learning_rate": 5.413058763472448e-06,
"loss": 0.2121,
"step": 108500
},
{
"epoch": 4.934359438660027,
"grad_norm": 39.164085388183594,
"learning_rate": 5.38897932057444e-06,
"loss": 0.1952,
"step": 109000
},
{
"epoch": 4.956994114984155,
"grad_norm": 27.279882431030273,
"learning_rate": 5.364899877676431e-06,
"loss": 0.1997,
"step": 109500
},
{
"epoch": 4.979628791308285,
"grad_norm": 54.53019332885742,
"learning_rate": 5.340820434778421e-06,
"loss": 0.2037,
"step": 110000
},
{
"epoch": 5.0,
"eval_accuracy": 0.8667464541264547,
"eval_loss": 0.6144042611122131,
"eval_runtime": 26.0348,
"eval_samples_per_second": 1508.405,
"eval_steps_per_second": 94.297,
"step": 110450
},
{
"epoch": 5.002263467632413,
"grad_norm": 24.079191207885742,
"learning_rate": 5.316740991880412e-06,
"loss": 0.197,
"step": 110500
},
{
"epoch": 5.024898143956541,
"grad_norm": 0.3425958752632141,
"learning_rate": 5.292661548982403e-06,
"loss": 0.1472,
"step": 111000
},
{
"epoch": 5.04753282028067,
"grad_norm": 0.11918644607067108,
"learning_rate": 5.268582106084394e-06,
"loss": 0.1614,
"step": 111500
},
{
"epoch": 5.070167496604799,
"grad_norm": 0.0681818500161171,
"learning_rate": 5.244502663186385e-06,
"loss": 0.1781,
"step": 112000
},
{
"epoch": 5.092802172928927,
"grad_norm": 62.686737060546875,
"learning_rate": 5.220423220288376e-06,
"loss": 0.1781,
"step": 112500
},
{
"epoch": 5.115436849253055,
"grad_norm": 73.86345672607422,
"learning_rate": 5.1963437773903666e-06,
"loss": 0.1679,
"step": 113000
},
{
"epoch": 5.1380715255771845,
"grad_norm": 0.13405387103557587,
"learning_rate": 5.172264334492357e-06,
"loss": 0.184,
"step": 113500
},
{
"epoch": 5.160706201901313,
"grad_norm": 38.33438491821289,
"learning_rate": 5.148184891594349e-06,
"loss": 0.1679,
"step": 114000
},
{
"epoch": 5.183340878225441,
"grad_norm": 0.36470118165016174,
"learning_rate": 5.124105448696339e-06,
"loss": 0.154,
"step": 114500
},
{
"epoch": 5.20597555454957,
"grad_norm": 31.240108489990234,
"learning_rate": 5.10002600579833e-06,
"loss": 0.1718,
"step": 115000
},
{
"epoch": 5.228610230873699,
"grad_norm": 0.2006056010723114,
"learning_rate": 5.075946562900322e-06,
"loss": 0.1624,
"step": 115500
},
{
"epoch": 5.251244907197827,
"grad_norm": 80.91893768310547,
"learning_rate": 5.0518671200023116e-06,
"loss": 0.1751,
"step": 116000
},
{
"epoch": 5.273879583521955,
"grad_norm": 39.18518829345703,
"learning_rate": 5.027787677104303e-06,
"loss": 0.1768,
"step": 116500
},
{
"epoch": 5.296514259846084,
"grad_norm": 10.39631175994873,
"learning_rate": 5.003708234206294e-06,
"loss": 0.186,
"step": 117000
},
{
"epoch": 5.319148936170213,
"grad_norm": 2.1696979999542236,
"learning_rate": 4.979628791308285e-06,
"loss": 0.175,
"step": 117500
},
{
"epoch": 5.341783612494341,
"grad_norm": 21.064584732055664,
"learning_rate": 4.955549348410276e-06,
"loss": 0.1625,
"step": 118000
},
{
"epoch": 5.36441828881847,
"grad_norm": 0.3759268522262573,
"learning_rate": 4.931469905512267e-06,
"loss": 0.1834,
"step": 118500
},
{
"epoch": 5.3870529651425985,
"grad_norm": 35.439117431640625,
"learning_rate": 4.9073904626142574e-06,
"loss": 0.1859,
"step": 119000
},
{
"epoch": 5.409687641466727,
"grad_norm": 63.224666595458984,
"learning_rate": 4.883311019716248e-06,
"loss": 0.1722,
"step": 119500
},
{
"epoch": 5.432322317790856,
"grad_norm": 2.553009033203125,
"learning_rate": 4.859231576818239e-06,
"loss": 0.173,
"step": 120000
},
{
"epoch": 5.454956994114984,
"grad_norm": 15.771255493164062,
"learning_rate": 4.83515213392023e-06,
"loss": 0.1922,
"step": 120500
},
{
"epoch": 5.4775916704391125,
"grad_norm": 108.56519317626953,
"learning_rate": 4.811072691022221e-06,
"loss": 0.1717,
"step": 121000
},
{
"epoch": 5.500226346763242,
"grad_norm": 78.23528289794922,
"learning_rate": 4.7869932481242124e-06,
"loss": 0.1822,
"step": 121500
},
{
"epoch": 5.52286102308737,
"grad_norm": 62.75898742675781,
"learning_rate": 4.7629138052262024e-06,
"loss": 0.1697,
"step": 122000
},
{
"epoch": 5.545495699411498,
"grad_norm": 136.13113403320312,
"learning_rate": 4.738834362328193e-06,
"loss": 0.1807,
"step": 122500
},
{
"epoch": 5.568130375735627,
"grad_norm": 52.2840461730957,
"learning_rate": 4.714754919430184e-06,
"loss": 0.1762,
"step": 123000
},
{
"epoch": 5.590765052059756,
"grad_norm": 4.957085609436035,
"learning_rate": 4.690675476532176e-06,
"loss": 0.1875,
"step": 123500
},
{
"epoch": 5.613399728383884,
"grad_norm": 39.328086853027344,
"learning_rate": 4.666596033634167e-06,
"loss": 0.1732,
"step": 124000
},
{
"epoch": 5.636034404708012,
"grad_norm": 3.6397218704223633,
"learning_rate": 4.642516590736157e-06,
"loss": 0.175,
"step": 124500
},
{
"epoch": 5.658669081032142,
"grad_norm": 32.74443435668945,
"learning_rate": 4.618437147838148e-06,
"loss": 0.1667,
"step": 125000
},
{
"epoch": 5.68130375735627,
"grad_norm": 32.64069366455078,
"learning_rate": 4.594357704940139e-06,
"loss": 0.1691,
"step": 125500
},
{
"epoch": 5.703938433680398,
"grad_norm": 21.668283462524414,
"learning_rate": 4.57027826204213e-06,
"loss": 0.1785,
"step": 126000
},
{
"epoch": 5.7265731100045265,
"grad_norm": 8.382264137268066,
"learning_rate": 4.546198819144121e-06,
"loss": 0.1829,
"step": 126500
},
{
"epoch": 5.749207786328656,
"grad_norm": 24.240978240966797,
"learning_rate": 4.522119376246112e-06,
"loss": 0.1573,
"step": 127000
},
{
"epoch": 5.771842462652784,
"grad_norm": 98.30403900146484,
"learning_rate": 4.4980399333481025e-06,
"loss": 0.1962,
"step": 127500
},
{
"epoch": 5.794477138976912,
"grad_norm": 0.6671485304832458,
"learning_rate": 4.473960490450093e-06,
"loss": 0.1813,
"step": 128000
},
{
"epoch": 5.8171118153010415,
"grad_norm": 71.27288055419922,
"learning_rate": 4.449881047552084e-06,
"loss": 0.1747,
"step": 128500
},
{
"epoch": 5.83974649162517,
"grad_norm": 148.5382537841797,
"learning_rate": 4.425801604654075e-06,
"loss": 0.1676,
"step": 129000
},
{
"epoch": 5.862381167949298,
"grad_norm": 0.19661898910999298,
"learning_rate": 4.401722161756066e-06,
"loss": 0.1771,
"step": 129500
},
{
"epoch": 5.885015844273427,
"grad_norm": 16.245052337646484,
"learning_rate": 4.377642718858057e-06,
"loss": 0.1864,
"step": 130000
},
{
"epoch": 5.907650520597556,
"grad_norm": 0.5395733118057251,
"learning_rate": 4.3535632759600475e-06,
"loss": 0.1775,
"step": 130500
},
{
"epoch": 5.930285196921684,
"grad_norm": 13.2942533493042,
"learning_rate": 4.329483833062038e-06,
"loss": 0.1669,
"step": 131000
},
{
"epoch": 5.952919873245813,
"grad_norm": 12.363393783569336,
"learning_rate": 4.30540439016403e-06,
"loss": 0.1747,
"step": 131500
},
{
"epoch": 5.975554549569941,
"grad_norm": 0.334881067276001,
"learning_rate": 4.281324947266021e-06,
"loss": 0.1941,
"step": 132000
},
{
"epoch": 5.99818922589407,
"grad_norm": 9.071168899536133,
"learning_rate": 4.257245504368011e-06,
"loss": 0.1745,
"step": 132500
},
{
"epoch": 6.0,
"eval_accuracy": 0.8641491176695272,
"eval_loss": 0.6998937726020813,
"eval_runtime": 26.0201,
"eval_samples_per_second": 1509.256,
"eval_steps_per_second": 94.35,
"step": 132540
},
{
"epoch": 6.020823902218198,
"grad_norm": 79.37480163574219,
"learning_rate": 4.2331660614700025e-06,
"loss": 0.1323,
"step": 133000
},
{
"epoch": 6.043458578542327,
"grad_norm": 97.10159301757812,
"learning_rate": 4.209086618571993e-06,
"loss": 0.1373,
"step": 133500
},
{
"epoch": 6.0660932548664555,
"grad_norm": 9.43271541595459,
"learning_rate": 4.185007175673984e-06,
"loss": 0.1422,
"step": 134000
},
{
"epoch": 6.088727931190584,
"grad_norm": 0.20963682234287262,
"learning_rate": 4.160927732775975e-06,
"loss": 0.1457,
"step": 134500
},
{
"epoch": 6.111362607514713,
"grad_norm": 55.66864776611328,
"learning_rate": 4.136848289877966e-06,
"loss": 0.1508,
"step": 135000
},
{
"epoch": 6.133997283838841,
"grad_norm": 84.8683090209961,
"learning_rate": 4.112768846979957e-06,
"loss": 0.1347,
"step": 135500
},
{
"epoch": 6.1566319601629695,
"grad_norm": 77.05133819580078,
"learning_rate": 4.0886894040819475e-06,
"loss": 0.1424,
"step": 136000
},
{
"epoch": 6.179266636487098,
"grad_norm": 0.16760210692882538,
"learning_rate": 4.064609961183938e-06,
"loss": 0.1401,
"step": 136500
},
{
"epoch": 6.201901312811227,
"grad_norm": 6.680587291717529,
"learning_rate": 4.040530518285929e-06,
"loss": 0.1579,
"step": 137000
},
{
"epoch": 6.224535989135355,
"grad_norm": 10.419951438903809,
"learning_rate": 4.01645107538792e-06,
"loss": 0.1441,
"step": 137500
},
{
"epoch": 6.247170665459484,
"grad_norm": 75.0548095703125,
"learning_rate": 3.992371632489911e-06,
"loss": 0.1489,
"step": 138000
},
{
"epoch": 6.269805341783613,
"grad_norm": 1.2689846754074097,
"learning_rate": 3.968292189591902e-06,
"loss": 0.1527,
"step": 138500
},
{
"epoch": 6.292440018107741,
"grad_norm": 23.291440963745117,
"learning_rate": 3.9442127466938925e-06,
"loss": 0.1508,
"step": 139000
},
{
"epoch": 6.315074694431869,
"grad_norm": 99.6236343383789,
"learning_rate": 3.920133303795884e-06,
"loss": 0.1561,
"step": 139500
},
{
"epoch": 6.337709370755999,
"grad_norm": 62.657745361328125,
"learning_rate": 3.896053860897875e-06,
"loss": 0.1316,
"step": 140000
},
{
"epoch": 6.360344047080127,
"grad_norm": 1.494821310043335,
"learning_rate": 3.871974417999865e-06,
"loss": 0.1371,
"step": 140500
},
{
"epoch": 6.382978723404255,
"grad_norm": 54.55570602416992,
"learning_rate": 3.847894975101857e-06,
"loss": 0.1281,
"step": 141000
},
{
"epoch": 6.4056133997283835,
"grad_norm": 39.3503303527832,
"learning_rate": 3.8238155322038475e-06,
"loss": 0.153,
"step": 141500
},
{
"epoch": 6.428248076052513,
"grad_norm": 211.0976104736328,
"learning_rate": 3.7997360893058384e-06,
"loss": 0.1529,
"step": 142000
},
{
"epoch": 6.450882752376641,
"grad_norm": 36.55986404418945,
"learning_rate": 3.775656646407829e-06,
"loss": 0.1446,
"step": 142500
},
{
"epoch": 6.473517428700769,
"grad_norm": 14.937396049499512,
"learning_rate": 3.7515772035098196e-06,
"loss": 0.157,
"step": 143000
},
{
"epoch": 6.4961521050248985,
"grad_norm": 0.12284702807664871,
"learning_rate": 3.727497760611811e-06,
"loss": 0.1576,
"step": 143500
},
{
"epoch": 6.518786781349027,
"grad_norm": 169.0521697998047,
"learning_rate": 3.7034183177138017e-06,
"loss": 0.1516,
"step": 144000
},
{
"epoch": 6.541421457673155,
"grad_norm": 119.7757339477539,
"learning_rate": 3.6793388748157925e-06,
"loss": 0.142,
"step": 144500
},
{
"epoch": 6.564056133997283,
"grad_norm": 0.5702412128448486,
"learning_rate": 3.6552594319177838e-06,
"loss": 0.1519,
"step": 145000
},
{
"epoch": 6.586690810321413,
"grad_norm": 0.6575600504875183,
"learning_rate": 3.631179989019774e-06,
"loss": 0.1494,
"step": 145500
},
{
"epoch": 6.609325486645541,
"grad_norm": 104.18098449707031,
"learning_rate": 3.607100546121765e-06,
"loss": 0.1431,
"step": 146000
},
{
"epoch": 6.631960162969669,
"grad_norm": 0.18219584226608276,
"learning_rate": 3.583021103223756e-06,
"loss": 0.1397,
"step": 146500
},
{
"epoch": 6.654594839293798,
"grad_norm": 39.80546569824219,
"learning_rate": 3.558941660325747e-06,
"loss": 0.1384,
"step": 147000
},
{
"epoch": 6.677229515617927,
"grad_norm": 70.61176300048828,
"learning_rate": 3.534862217427738e-06,
"loss": 0.1452,
"step": 147500
},
{
"epoch": 6.699864191942055,
"grad_norm": 0.11137774586677551,
"learning_rate": 3.5107827745297292e-06,
"loss": 0.1649,
"step": 148000
},
{
"epoch": 6.722498868266184,
"grad_norm": 1.3033461570739746,
"learning_rate": 3.4867033316317196e-06,
"loss": 0.1468,
"step": 148500
},
{
"epoch": 6.7451335445903124,
"grad_norm": 188.11358642578125,
"learning_rate": 3.4626238887337105e-06,
"loss": 0.1396,
"step": 149000
},
{
"epoch": 6.767768220914441,
"grad_norm": 186.4955596923828,
"learning_rate": 3.4385444458357013e-06,
"loss": 0.1415,
"step": 149500
},
{
"epoch": 6.79040289723857,
"grad_norm": 10.157150268554688,
"learning_rate": 3.4144650029376926e-06,
"loss": 0.1446,
"step": 150000
},
{
"epoch": 6.813037573562698,
"grad_norm": 14.647910118103027,
"learning_rate": 3.3903855600396834e-06,
"loss": 0.1363,
"step": 150500
},
{
"epoch": 6.8356722498868265,
"grad_norm": 0.07332862168550491,
"learning_rate": 3.366306117141674e-06,
"loss": 0.1622,
"step": 151000
},
{
"epoch": 6.858306926210955,
"grad_norm": 125.91682434082031,
"learning_rate": 3.342226674243665e-06,
"loss": 0.1408,
"step": 151500
},
{
"epoch": 6.880941602535084,
"grad_norm": 0.21492162346839905,
"learning_rate": 3.318147231345656e-06,
"loss": 0.1423,
"step": 152000
},
{
"epoch": 6.903576278859212,
"grad_norm": 27.321796417236328,
"learning_rate": 3.2940677884476467e-06,
"loss": 0.1562,
"step": 152500
},
{
"epoch": 6.926210955183341,
"grad_norm": 0.10927353799343109,
"learning_rate": 3.269988345549638e-06,
"loss": 0.1475,
"step": 153000
},
{
"epoch": 6.94884563150747,
"grad_norm": 97.3139877319336,
"learning_rate": 3.2459089026516284e-06,
"loss": 0.1438,
"step": 153500
},
{
"epoch": 6.971480307831598,
"grad_norm": 159.5480499267578,
"learning_rate": 3.2218294597536192e-06,
"loss": 0.1481,
"step": 154000
},
{
"epoch": 6.994114984155726,
"grad_norm": 0.06921840459108353,
"learning_rate": 3.19775001685561e-06,
"loss": 0.1671,
"step": 154500
},
{
"epoch": 7.0,
"eval_accuracy": 0.8639454050062387,
"eval_loss": 0.7750576734542847,
"eval_runtime": 26.0647,
"eval_samples_per_second": 1506.673,
"eval_steps_per_second": 94.189,
"step": 154630
},
{
"epoch": 7.016749660479855,
"grad_norm": 1.394852638244629,
"learning_rate": 3.1736705739576013e-06,
"loss": 0.1232,
"step": 155000
},
{
"epoch": 7.039384336803984,
"grad_norm": 0.33521416783332825,
"learning_rate": 3.149591131059592e-06,
"loss": 0.108,
"step": 155500
},
{
"epoch": 7.062019013128112,
"grad_norm": 0.06891336292028427,
"learning_rate": 3.1255116881615826e-06,
"loss": 0.1155,
"step": 156000
},
{
"epoch": 7.0846536894522405,
"grad_norm": 15.24691390991211,
"learning_rate": 3.101432245263574e-06,
"loss": 0.1266,
"step": 156500
},
{
"epoch": 7.10728836577637,
"grad_norm": 0.06932001560926437,
"learning_rate": 3.0773528023655647e-06,
"loss": 0.1114,
"step": 157000
},
{
"epoch": 7.129923042100498,
"grad_norm": 0.0540509857237339,
"learning_rate": 3.0532733594675555e-06,
"loss": 0.1279,
"step": 157500
},
{
"epoch": 7.152557718424626,
"grad_norm": 29.716217041015625,
"learning_rate": 3.0291939165695468e-06,
"loss": 0.115,
"step": 158000
},
{
"epoch": 7.1751923947487555,
"grad_norm": 0.0442744679749012,
"learning_rate": 3.0051144736715376e-06,
"loss": 0.1252,
"step": 158500
},
{
"epoch": 7.197827071072884,
"grad_norm": 7.542829513549805,
"learning_rate": 2.981035030773528e-06,
"loss": 0.1156,
"step": 159000
},
{
"epoch": 7.220461747397012,
"grad_norm": 11.190882682800293,
"learning_rate": 2.9569555878755193e-06,
"loss": 0.1065,
"step": 159500
},
{
"epoch": 7.24309642372114,
"grad_norm": 0.10643190145492554,
"learning_rate": 2.93287614497751e-06,
"loss": 0.1316,
"step": 160000
},
{
"epoch": 7.26573110004527,
"grad_norm": 0.16745133697986603,
"learning_rate": 2.908796702079501e-06,
"loss": 0.1101,
"step": 160500
},
{
"epoch": 7.288365776369398,
"grad_norm": 0.16076330840587616,
"learning_rate": 2.884717259181492e-06,
"loss": 0.1314,
"step": 161000
},
{
"epoch": 7.311000452693526,
"grad_norm": 0.10047034919261932,
"learning_rate": 2.8606378162834826e-06,
"loss": 0.1181,
"step": 161500
},
{
"epoch": 7.333635129017655,
"grad_norm": 0.331920862197876,
"learning_rate": 2.8365583733854734e-06,
"loss": 0.1259,
"step": 162000
},
{
"epoch": 7.356269805341784,
"grad_norm": 0.1453462541103363,
"learning_rate": 2.8124789304874643e-06,
"loss": 0.1249,
"step": 162500
},
{
"epoch": 7.378904481665912,
"grad_norm": 0.8490937948226929,
"learning_rate": 2.7883994875894555e-06,
"loss": 0.1209,
"step": 163000
},
{
"epoch": 7.401539157990041,
"grad_norm": 0.22700923681259155,
"learning_rate": 2.7643200446914464e-06,
"loss": 0.1244,
"step": 163500
},
{
"epoch": 7.424173834314169,
"grad_norm": 0.19261109828948975,
"learning_rate": 2.7402406017934368e-06,
"loss": 0.135,
"step": 164000
},
{
"epoch": 7.446808510638298,
"grad_norm": 75.31595611572266,
"learning_rate": 2.716161158895428e-06,
"loss": 0.1262,
"step": 164500
},
{
"epoch": 7.469443186962426,
"grad_norm": 65.7965087890625,
"learning_rate": 2.692081715997419e-06,
"loss": 0.1139,
"step": 165000
},
{
"epoch": 7.492077863286555,
"grad_norm": 114.45712280273438,
"learning_rate": 2.6680022730994097e-06,
"loss": 0.1335,
"step": 165500
},
{
"epoch": 7.5147125396106835,
"grad_norm": 0.08683761209249496,
"learning_rate": 2.643922830201401e-06,
"loss": 0.1257,
"step": 166000
},
{
"epoch": 7.537347215934812,
"grad_norm": 91.00257873535156,
"learning_rate": 2.6198433873033918e-06,
"loss": 0.1307,
"step": 166500
},
{
"epoch": 7.559981892258941,
"grad_norm": 0.0967201367020607,
"learning_rate": 2.595763944405382e-06,
"loss": 0.1163,
"step": 167000
},
{
"epoch": 7.582616568583069,
"grad_norm": 179.25857543945312,
"learning_rate": 2.5716845015073735e-06,
"loss": 0.1207,
"step": 167500
},
{
"epoch": 7.605251244907198,
"grad_norm": 0.09339158982038498,
"learning_rate": 2.5476050586093643e-06,
"loss": 0.1284,
"step": 168000
},
{
"epoch": 7.627885921231327,
"grad_norm": 0.0970580130815506,
"learning_rate": 2.523525615711355e-06,
"loss": 0.1222,
"step": 168500
},
{
"epoch": 7.650520597555455,
"grad_norm": 0.26078376173973083,
"learning_rate": 2.499446172813346e-06,
"loss": 0.1225,
"step": 169000
},
{
"epoch": 7.673155273879583,
"grad_norm": 29.06781005859375,
"learning_rate": 2.475366729915337e-06,
"loss": 0.1238,
"step": 169500
},
{
"epoch": 7.695789950203712,
"grad_norm": 4.162774085998535,
"learning_rate": 2.4512872870173276e-06,
"loss": 0.1323,
"step": 170000
},
{
"epoch": 7.718424626527841,
"grad_norm": 0.9394495487213135,
"learning_rate": 2.4272078441193185e-06,
"loss": 0.1173,
"step": 170500
},
{
"epoch": 7.741059302851969,
"grad_norm": 0.06645090132951736,
"learning_rate": 2.4031284012213097e-06,
"loss": 0.1275,
"step": 171000
},
{
"epoch": 7.7636939791760975,
"grad_norm": 0.12068886309862137,
"learning_rate": 2.3790489583233006e-06,
"loss": 0.1103,
"step": 171500
},
{
"epoch": 7.786328655500227,
"grad_norm": 0.18481621146202087,
"learning_rate": 2.3549695154252914e-06,
"loss": 0.1225,
"step": 172000
},
{
"epoch": 7.808963331824355,
"grad_norm": 0.0315103605389595,
"learning_rate": 2.3308900725272822e-06,
"loss": 0.1061,
"step": 172500
},
{
"epoch": 7.831598008148483,
"grad_norm": 2.336836814880371,
"learning_rate": 2.306810629629273e-06,
"loss": 0.1226,
"step": 173000
},
{
"epoch": 7.854232684472612,
"grad_norm": 32.957130432128906,
"learning_rate": 2.282731186731264e-06,
"loss": 0.1228,
"step": 173500
},
{
"epoch": 7.876867360796741,
"grad_norm": 0.14461065828800201,
"learning_rate": 2.2586517438332547e-06,
"loss": 0.1172,
"step": 174000
},
{
"epoch": 7.899502037120869,
"grad_norm": 0.13647380471229553,
"learning_rate": 2.2345723009352456e-06,
"loss": 0.1242,
"step": 174500
},
{
"epoch": 7.922136713444997,
"grad_norm": 0.14755909144878387,
"learning_rate": 2.210492858037237e-06,
"loss": 0.1167,
"step": 175000
},
{
"epoch": 7.944771389769127,
"grad_norm": 0.16207629442214966,
"learning_rate": 2.1864134151392277e-06,
"loss": 0.127,
"step": 175500
},
{
"epoch": 7.967406066093255,
"grad_norm": 15.389420509338379,
"learning_rate": 2.1623339722412185e-06,
"loss": 0.1163,
"step": 176000
},
{
"epoch": 7.990040742417383,
"grad_norm": 202.52561950683594,
"learning_rate": 2.1382545293432093e-06,
"loss": 0.121,
"step": 176500
},
{
"epoch": 8.0,
"eval_accuracy": 0.8648621119910367,
"eval_loss": 0.8655109405517578,
"eval_runtime": 26.0286,
"eval_samples_per_second": 1508.765,
"eval_steps_per_second": 94.319,
"step": 176720
},
{
"epoch": 8.012675418741512,
"grad_norm": 1.5518616437911987,
"learning_rate": 2.1141750864452e-06,
"loss": 0.1058,
"step": 177000
},
{
"epoch": 8.03531009506564,
"grad_norm": 0.015344664454460144,
"learning_rate": 2.090095643547191e-06,
"loss": 0.0809,
"step": 177500
},
{
"epoch": 8.057944771389769,
"grad_norm": 100.70498657226562,
"learning_rate": 2.066016200649182e-06,
"loss": 0.094,
"step": 178000
},
{
"epoch": 8.080579447713898,
"grad_norm": 0.47632962465286255,
"learning_rate": 2.0419367577511727e-06,
"loss": 0.1115,
"step": 178500
},
{
"epoch": 8.103214124038026,
"grad_norm": 6.641209125518799,
"learning_rate": 2.017857314853164e-06,
"loss": 0.0856,
"step": 179000
},
{
"epoch": 8.125848800362155,
"grad_norm": 27.82591438293457,
"learning_rate": 1.9937778719551548e-06,
"loss": 0.0983,
"step": 179500
},
{
"epoch": 8.148483476686284,
"grad_norm": 314.1797790527344,
"learning_rate": 1.9696984290571456e-06,
"loss": 0.1075,
"step": 180000
},
{
"epoch": 8.171118153010411,
"grad_norm": 254.616455078125,
"learning_rate": 1.9456189861591364e-06,
"loss": 0.1016,
"step": 180500
},
{
"epoch": 8.19375282933454,
"grad_norm": 0.07589972764253616,
"learning_rate": 1.9215395432611273e-06,
"loss": 0.0924,
"step": 181000
},
{
"epoch": 8.21638750565867,
"grad_norm": 77.42697143554688,
"learning_rate": 1.8974601003631183e-06,
"loss": 0.0965,
"step": 181500
},
{
"epoch": 8.239022181982797,
"grad_norm": 0.12723857164382935,
"learning_rate": 1.873380657465109e-06,
"loss": 0.1024,
"step": 182000
},
{
"epoch": 8.261656858306926,
"grad_norm": 7.569960594177246,
"learning_rate": 1.8493012145671e-06,
"loss": 0.1201,
"step": 182500
},
{
"epoch": 8.284291534631055,
"grad_norm": 92.7170181274414,
"learning_rate": 1.825221771669091e-06,
"loss": 0.0976,
"step": 183000
},
{
"epoch": 8.306926210955183,
"grad_norm": 0.009992193430662155,
"learning_rate": 1.8011423287710816e-06,
"loss": 0.1025,
"step": 183500
},
{
"epoch": 8.329560887279312,
"grad_norm": 0.11167449504137039,
"learning_rate": 1.7770628858730727e-06,
"loss": 0.09,
"step": 184000
},
{
"epoch": 8.352195563603441,
"grad_norm": 54.87889099121094,
"learning_rate": 1.7529834429750633e-06,
"loss": 0.0993,
"step": 184500
},
{
"epoch": 8.374830239927569,
"grad_norm": 0.07053136825561523,
"learning_rate": 1.7289040000770544e-06,
"loss": 0.0987,
"step": 185000
},
{
"epoch": 8.397464916251698,
"grad_norm": 4.904270648956299,
"learning_rate": 1.7048245571790454e-06,
"loss": 0.105,
"step": 185500
},
{
"epoch": 8.420099592575827,
"grad_norm": 0.10817304253578186,
"learning_rate": 1.680745114281036e-06,
"loss": 0.0977,
"step": 186000
},
{
"epoch": 8.442734268899954,
"grad_norm": 0.03593330830335617,
"learning_rate": 1.656665671383027e-06,
"loss": 0.0954,
"step": 186500
},
{
"epoch": 8.465368945224084,
"grad_norm": 105.52520751953125,
"learning_rate": 1.6325862284850181e-06,
"loss": 0.1065,
"step": 187000
},
{
"epoch": 8.488003621548211,
"grad_norm": 0.19925498962402344,
"learning_rate": 1.6085067855870087e-06,
"loss": 0.0983,
"step": 187500
},
{
"epoch": 8.51063829787234,
"grad_norm": 0.29446855187416077,
"learning_rate": 1.5844273426889998e-06,
"loss": 0.1015,
"step": 188000
},
{
"epoch": 8.53327297419647,
"grad_norm": 13.635686874389648,
"learning_rate": 1.5603478997909904e-06,
"loss": 0.0902,
"step": 188500
},
{
"epoch": 8.555907650520597,
"grad_norm": 0.05709734186530113,
"learning_rate": 1.5362684568929815e-06,
"loss": 0.0972,
"step": 189000
},
{
"epoch": 8.578542326844726,
"grad_norm": 0.023464586585760117,
"learning_rate": 1.5121890139949725e-06,
"loss": 0.1121,
"step": 189500
},
{
"epoch": 8.601177003168855,
"grad_norm": 0.10528367012739182,
"learning_rate": 1.4881095710969631e-06,
"loss": 0.1001,
"step": 190000
},
{
"epoch": 8.623811679492983,
"grad_norm": 0.4939417243003845,
"learning_rate": 1.4640301281989542e-06,
"loss": 0.098,
"step": 190500
},
{
"epoch": 8.646446355817112,
"grad_norm": 12.716562271118164,
"learning_rate": 1.4399506853009452e-06,
"loss": 0.0945,
"step": 191000
},
{
"epoch": 8.669081032141241,
"grad_norm": 145.86587524414062,
"learning_rate": 1.4158712424029358e-06,
"loss": 0.0888,
"step": 191500
},
{
"epoch": 8.691715708465368,
"grad_norm": 0.03197444975376129,
"learning_rate": 1.3917917995049269e-06,
"loss": 0.0858,
"step": 192000
},
{
"epoch": 8.714350384789498,
"grad_norm": 0.11782459169626236,
"learning_rate": 1.3677123566069175e-06,
"loss": 0.1027,
"step": 192500
},
{
"epoch": 8.736985061113627,
"grad_norm": 0.1515623927116394,
"learning_rate": 1.3436329137089086e-06,
"loss": 0.1003,
"step": 193000
},
{
"epoch": 8.759619737437754,
"grad_norm": 0.04349144920706749,
"learning_rate": 1.3195534708108996e-06,
"loss": 0.0926,
"step": 193500
},
{
"epoch": 8.782254413761883,
"grad_norm": 14.532307624816895,
"learning_rate": 1.2954740279128902e-06,
"loss": 0.0935,
"step": 194000
},
{
"epoch": 8.80488909008601,
"grad_norm": 0.05790287256240845,
"learning_rate": 1.2713945850148813e-06,
"loss": 0.0923,
"step": 194500
},
{
"epoch": 8.82752376641014,
"grad_norm": 81.71268463134766,
"learning_rate": 1.247315142116872e-06,
"loss": 0.1172,
"step": 195000
},
{
"epoch": 8.85015844273427,
"grad_norm": 0.03535538911819458,
"learning_rate": 1.223235699218863e-06,
"loss": 0.0985,
"step": 195500
},
{
"epoch": 8.872793119058397,
"grad_norm": 0.05989941582083702,
"learning_rate": 1.199156256320854e-06,
"loss": 0.1033,
"step": 196000
},
{
"epoch": 8.895427795382526,
"grad_norm": 0.18569760024547577,
"learning_rate": 1.1750768134228448e-06,
"loss": 0.1178,
"step": 196500
},
{
"epoch": 8.918062471706655,
"grad_norm": 0.02892606146633625,
"learning_rate": 1.1509973705248357e-06,
"loss": 0.0964,
"step": 197000
},
{
"epoch": 8.940697148030782,
"grad_norm": 181.0758819580078,
"learning_rate": 1.1269179276268265e-06,
"loss": 0.1056,
"step": 197500
},
{
"epoch": 8.963331824354912,
"grad_norm": 28.286996841430664,
"learning_rate": 1.1028384847288175e-06,
"loss": 0.1055,
"step": 198000
},
{
"epoch": 8.98596650067904,
"grad_norm": 0.08270686864852905,
"learning_rate": 1.0787590418308084e-06,
"loss": 0.1083,
"step": 198500
},
{
"epoch": 9.0,
"eval_accuracy": 0.8651931450688803,
"eval_loss": 0.9116848111152649,
"eval_runtime": 25.9694,
"eval_samples_per_second": 1512.2,
"eval_steps_per_second": 94.534,
"step": 198810
},
{
"epoch": 9.008601177003168,
"grad_norm": 0.3910556733608246,
"learning_rate": 1.0546795989327992e-06,
"loss": 0.0919,
"step": 199000
},
{
"epoch": 9.031235853327297,
"grad_norm": 39.47013854980469,
"learning_rate": 1.03060015603479e-06,
"loss": 0.0828,
"step": 199500
},
{
"epoch": 9.053870529651427,
"grad_norm": 0.02229388989508152,
"learning_rate": 1.006520713136781e-06,
"loss": 0.0814,
"step": 200000
},
{
"epoch": 9.076505205975554,
"grad_norm": 0.028238942846655846,
"learning_rate": 9.82441270238772e-07,
"loss": 0.0835,
"step": 200500
},
{
"epoch": 9.099139882299683,
"grad_norm": 10.808701515197754,
"learning_rate": 9.583618273407628e-07,
"loss": 0.0923,
"step": 201000
},
{
"epoch": 9.121774558623812,
"grad_norm": 221.81275939941406,
"learning_rate": 9.342823844427536e-07,
"loss": 0.0665,
"step": 201500
},
{
"epoch": 9.14440923494794,
"grad_norm": 0.1080513447523117,
"learning_rate": 9.102029415447445e-07,
"loss": 0.0876,
"step": 202000
},
{
"epoch": 9.167043911272069,
"grad_norm": 232.07345581054688,
"learning_rate": 8.861234986467354e-07,
"loss": 0.0863,
"step": 202500
},
{
"epoch": 9.189678587596198,
"grad_norm": 0.04613710194826126,
"learning_rate": 8.620440557487263e-07,
"loss": 0.0938,
"step": 203000
},
{
"epoch": 9.212313263920326,
"grad_norm": 0.022649744525551796,
"learning_rate": 8.379646128507171e-07,
"loss": 0.0815,
"step": 203500
},
{
"epoch": 9.234947940244455,
"grad_norm": 0.0704297199845314,
"learning_rate": 8.138851699527081e-07,
"loss": 0.0793,
"step": 204000
},
{
"epoch": 9.257582616568584,
"grad_norm": 169.63650512695312,
"learning_rate": 7.898057270546989e-07,
"loss": 0.0757,
"step": 204500
},
{
"epoch": 9.280217292892711,
"grad_norm": 145.97549438476562,
"learning_rate": 7.657262841566899e-07,
"loss": 0.0818,
"step": 205000
},
{
"epoch": 9.30285196921684,
"grad_norm": 0.3320428431034088,
"learning_rate": 7.416468412586807e-07,
"loss": 0.0996,
"step": 205500
},
{
"epoch": 9.325486645540968,
"grad_norm": 2.268958806991577,
"learning_rate": 7.175673983606715e-07,
"loss": 0.0835,
"step": 206000
},
{
"epoch": 9.348121321865097,
"grad_norm": 0.038560718297958374,
"learning_rate": 6.934879554626625e-07,
"loss": 0.0822,
"step": 206500
},
{
"epoch": 9.370755998189226,
"grad_norm": 92.30923461914062,
"learning_rate": 6.694085125646534e-07,
"loss": 0.0854,
"step": 207000
},
{
"epoch": 9.393390674513354,
"grad_norm": 0.2284342646598816,
"learning_rate": 6.453290696666442e-07,
"loss": 0.0807,
"step": 207500
},
{
"epoch": 9.416025350837483,
"grad_norm": 0.8369685411453247,
"learning_rate": 6.212496267686352e-07,
"loss": 0.0744,
"step": 208000
},
{
"epoch": 9.438660027161612,
"grad_norm": 0.49937498569488525,
"learning_rate": 5.97170183870626e-07,
"loss": 0.0942,
"step": 208500
},
{
"epoch": 9.46129470348574,
"grad_norm": 0.020057352259755135,
"learning_rate": 5.73090740972617e-07,
"loss": 0.0748,
"step": 209000
},
{
"epoch": 9.483929379809869,
"grad_norm": 196.6532745361328,
"learning_rate": 5.490112980746078e-07,
"loss": 0.0964,
"step": 209500
},
{
"epoch": 9.506564056133998,
"grad_norm": 0.5909414887428284,
"learning_rate": 5.249318551765987e-07,
"loss": 0.0759,
"step": 210000
},
{
"epoch": 9.529198732458125,
"grad_norm": 135.35101318359375,
"learning_rate": 5.008524122785896e-07,
"loss": 0.0804,
"step": 210500
},
{
"epoch": 9.551833408782255,
"grad_norm": 0.0622185617685318,
"learning_rate": 4.7677296938058045e-07,
"loss": 0.0709,
"step": 211000
},
{
"epoch": 9.574468085106384,
"grad_norm": 0.05341747775673866,
"learning_rate": 4.526935264825713e-07,
"loss": 0.0863,
"step": 211500
},
{
"epoch": 9.597102761430511,
"grad_norm": 55.312278747558594,
"learning_rate": 4.286140835845622e-07,
"loss": 0.0782,
"step": 212000
},
{
"epoch": 9.61973743775464,
"grad_norm": 0.06182483211159706,
"learning_rate": 4.0453464068655306e-07,
"loss": 0.0881,
"step": 212500
},
{
"epoch": 9.64237211407877,
"grad_norm": 0.06101556122303009,
"learning_rate": 3.80455197788544e-07,
"loss": 0.0834,
"step": 213000
},
{
"epoch": 9.665006790402897,
"grad_norm": 0.35538122057914734,
"learning_rate": 3.5637575489053483e-07,
"loss": 0.0776,
"step": 213500
},
{
"epoch": 9.687641466727026,
"grad_norm": 0.08858389407396317,
"learning_rate": 3.322963119925258e-07,
"loss": 0.0832,
"step": 214000
},
{
"epoch": 9.710276143051153,
"grad_norm": 0.07368449866771698,
"learning_rate": 3.0821686909451666e-07,
"loss": 0.0921,
"step": 214500
},
{
"epoch": 9.732910819375283,
"grad_norm": 0.4127441644668579,
"learning_rate": 2.8413742619650755e-07,
"loss": 0.0846,
"step": 215000
},
{
"epoch": 9.755545495699412,
"grad_norm": 0.06469714641571045,
"learning_rate": 2.6005798329849844e-07,
"loss": 0.0928,
"step": 215500
},
{
"epoch": 9.77818017202354,
"grad_norm": 0.28226494789123535,
"learning_rate": 2.3597854040048932e-07,
"loss": 0.0796,
"step": 216000
},
{
"epoch": 9.800814848347668,
"grad_norm": 250.34405517578125,
"learning_rate": 2.118990975024802e-07,
"loss": 0.0859,
"step": 216500
},
{
"epoch": 9.823449524671798,
"grad_norm": 22.07843589782715,
"learning_rate": 1.878196546044711e-07,
"loss": 0.0846,
"step": 217000
},
{
"epoch": 9.846084200995925,
"grad_norm": 14.63901138305664,
"learning_rate": 1.6374021170646199e-07,
"loss": 0.0788,
"step": 217500
},
{
"epoch": 9.868718877320054,
"grad_norm": 80.16069793701172,
"learning_rate": 1.3966076880845285e-07,
"loss": 0.095,
"step": 218000
},
{
"epoch": 9.891353553644183,
"grad_norm": 0.07123162597417831,
"learning_rate": 1.1558132591044375e-07,
"loss": 0.0935,
"step": 218500
},
{
"epoch": 9.91398822996831,
"grad_norm": 17.373336791992188,
"learning_rate": 9.150188301243464e-08,
"loss": 0.0839,
"step": 219000
},
{
"epoch": 9.93662290629244,
"grad_norm": 0.0160963274538517,
"learning_rate": 6.742244011442552e-08,
"loss": 0.0746,
"step": 219500
},
{
"epoch": 9.95925758261657,
"grad_norm": 0.07339876890182495,
"learning_rate": 4.3342997216416404e-08,
"loss": 0.0755,
"step": 220000
},
{
"epoch": 9.981892258940697,
"grad_norm": 0.06764261424541473,
"learning_rate": 1.926355431840729e-08,
"loss": 0.0835,
"step": 220500
},
{
"epoch": 10.0,
"eval_accuracy": 0.8659570675562119,
"eval_loss": 0.9559618830680847,
"eval_runtime": 26.0025,
"eval_samples_per_second": 1510.279,
"eval_steps_per_second": 94.414,
"step": 220900
},
{
"epoch": 10.0,
"step": 220900,
"total_flos": 1.4387669402147813e+17,
"train_loss": 0.2225576283846467,
"train_runtime": 11995.4266,
"train_samples_per_second": 294.638,
"train_steps_per_second": 18.415
}
],
"logging_steps": 500,
"max_steps": 220900,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.4387669402147813e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}