{
"best_global_step": 2000,
"best_metric": 38.607193210094984,
"best_model_checkpoint": "./SALAMA_C7/checkpoint-2000",
"epoch": 1.6294561010389081,
"eval_steps": 2000,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008148299042574863,
"grad_norm": 15.915007591247559,
"learning_rate": 1.9000000000000002e-06,
"loss": 1.0767,
"step": 20
},
{
"epoch": 0.016296598085149726,
"grad_norm": 10.589001655578613,
"learning_rate": 3.900000000000001e-06,
"loss": 0.9639,
"step": 40
},
{
"epoch": 0.024444897127724588,
"grad_norm": 11.770127296447754,
"learning_rate": 5.9e-06,
"loss": 0.9679,
"step": 60
},
{
"epoch": 0.03259319617029945,
"grad_norm": 11.00736141204834,
"learning_rate": 7.9e-06,
"loss": 0.9264,
"step": 80
},
{
"epoch": 0.04074149521287431,
"grad_norm": 8.219990730285645,
"learning_rate": 9.9e-06,
"loss": 0.7982,
"step": 100
},
{
"epoch": 0.048889794255449176,
"grad_norm": 7.805874824523926,
"learning_rate": 9.973847212663455e-06,
"loss": 0.7226,
"step": 120
},
{
"epoch": 0.057038093298024034,
"grad_norm": 6.7659430503845215,
"learning_rate": 9.946317962835514e-06,
"loss": 0.7378,
"step": 140
},
{
"epoch": 0.0651863923405989,
"grad_norm": 6.5482282638549805,
"learning_rate": 9.91878871300757e-06,
"loss": 0.716,
"step": 160
},
{
"epoch": 0.07333469138317376,
"grad_norm": 6.729607105255127,
"learning_rate": 9.891259463179629e-06,
"loss": 0.7048,
"step": 180
},
{
"epoch": 0.08148299042574862,
"grad_norm": 8.517411231994629,
"learning_rate": 9.863730213351686e-06,
"loss": 0.7016,
"step": 200
},
{
"epoch": 0.0896312894683235,
"grad_norm": 6.785459041595459,
"learning_rate": 9.836200963523744e-06,
"loss": 0.7395,
"step": 220
},
{
"epoch": 0.09777958851089835,
"grad_norm": 6.437356472015381,
"learning_rate": 9.808671713695803e-06,
"loss": 0.6579,
"step": 240
},
{
"epoch": 0.10592788755347321,
"grad_norm": 6.3125762939453125,
"learning_rate": 9.781142463867861e-06,
"loss": 0.6969,
"step": 260
},
{
"epoch": 0.11407618659604807,
"grad_norm": 6.493209362030029,
"learning_rate": 9.753613214039918e-06,
"loss": 0.6338,
"step": 280
},
{
"epoch": 0.12222448563862294,
"grad_norm": 8.855860710144043,
"learning_rate": 9.726083964211977e-06,
"loss": 0.6657,
"step": 300
},
{
"epoch": 0.1303727846811978,
"grad_norm": 5.335328578948975,
"learning_rate": 9.698554714384033e-06,
"loss": 0.6906,
"step": 320
},
{
"epoch": 0.13852108372377267,
"grad_norm": 6.506489276885986,
"learning_rate": 9.671025464556092e-06,
"loss": 0.6485,
"step": 340
},
{
"epoch": 0.14666938276634753,
"grad_norm": 5.023345947265625,
"learning_rate": 9.64349621472815e-06,
"loss": 0.6291,
"step": 360
},
{
"epoch": 0.15481768180892239,
"grad_norm": 8.132065773010254,
"learning_rate": 9.615966964900207e-06,
"loss": 0.6553,
"step": 380
},
{
"epoch": 0.16296598085149724,
"grad_norm": 7.615749359130859,
"learning_rate": 9.588437715072266e-06,
"loss": 0.6635,
"step": 400
},
{
"epoch": 0.1711142798940721,
"grad_norm": 6.487279415130615,
"learning_rate": 9.560908465244322e-06,
"loss": 0.7554,
"step": 420
},
{
"epoch": 0.179262578936647,
"grad_norm": 6.409642219543457,
"learning_rate": 9.53337921541638e-06,
"loss": 0.5839,
"step": 440
},
{
"epoch": 0.18741087797922185,
"grad_norm": 5.239986419677734,
"learning_rate": 9.505849965588438e-06,
"loss": 0.7271,
"step": 460
},
{
"epoch": 0.1955591770217967,
"grad_norm": 5.452706336975098,
"learning_rate": 9.478320715760496e-06,
"loss": 0.6758,
"step": 480
},
{
"epoch": 0.20370747606437156,
"grad_norm": 4.827511787414551,
"learning_rate": 9.450791465932555e-06,
"loss": 0.6178,
"step": 500
},
{
"epoch": 0.21185577510694642,
"grad_norm": 6.320280075073242,
"learning_rate": 9.423262216104611e-06,
"loss": 0.6781,
"step": 520
},
{
"epoch": 0.22000407414952128,
"grad_norm": 5.648658275604248,
"learning_rate": 9.39573296627667e-06,
"loss": 0.6448,
"step": 540
},
{
"epoch": 0.22815237319209614,
"grad_norm": 6.806399822235107,
"learning_rate": 9.368203716448727e-06,
"loss": 0.6206,
"step": 560
},
{
"epoch": 0.23630067223467102,
"grad_norm": 6.556358337402344,
"learning_rate": 9.340674466620785e-06,
"loss": 0.6105,
"step": 580
},
{
"epoch": 0.24444897127724588,
"grad_norm": 5.21808385848999,
"learning_rate": 9.313145216792842e-06,
"loss": 0.651,
"step": 600
},
{
"epoch": 0.2525972703198207,
"grad_norm": 6.145657539367676,
"learning_rate": 9.2856159669649e-06,
"loss": 0.6504,
"step": 620
},
{
"epoch": 0.2607455693623956,
"grad_norm": 4.201127529144287,
"learning_rate": 9.258086717136959e-06,
"loss": 0.5885,
"step": 640
},
{
"epoch": 0.2688938684049705,
"grad_norm": 5.38640022277832,
"learning_rate": 9.230557467309017e-06,
"loss": 0.5888,
"step": 660
},
{
"epoch": 0.27704216744754534,
"grad_norm": 5.217910289764404,
"learning_rate": 9.203028217481074e-06,
"loss": 0.5901,
"step": 680
},
{
"epoch": 0.2851904664901202,
"grad_norm": 5.227973461151123,
"learning_rate": 9.175498967653133e-06,
"loss": 0.7178,
"step": 700
},
{
"epoch": 0.29333876553269506,
"grad_norm": 6.235045909881592,
"learning_rate": 9.147969717825191e-06,
"loss": 0.6711,
"step": 720
},
{
"epoch": 0.3014870645752699,
"grad_norm": 5.588578224182129,
"learning_rate": 9.120440467997248e-06,
"loss": 0.522,
"step": 740
},
{
"epoch": 0.30963536361784477,
"grad_norm": 5.49254035949707,
"learning_rate": 9.092911218169306e-06,
"loss": 0.5962,
"step": 760
},
{
"epoch": 0.31778366266041963,
"grad_norm": 5.4190239906311035,
"learning_rate": 9.065381968341363e-06,
"loss": 0.5981,
"step": 780
},
{
"epoch": 0.3259319617029945,
"grad_norm": 6.350463390350342,
"learning_rate": 9.037852718513422e-06,
"loss": 0.6114,
"step": 800
},
{
"epoch": 0.33408026074556935,
"grad_norm": 6.231777191162109,
"learning_rate": 9.010323468685478e-06,
"loss": 0.6729,
"step": 820
},
{
"epoch": 0.3422285597881442,
"grad_norm": 6.590610027313232,
"learning_rate": 8.982794218857537e-06,
"loss": 0.6124,
"step": 840
},
{
"epoch": 0.35037685883071906,
"grad_norm": 5.504190444946289,
"learning_rate": 8.955264969029595e-06,
"loss": 0.6555,
"step": 860
},
{
"epoch": 0.358525157873294,
"grad_norm": 4.821358680725098,
"learning_rate": 8.927735719201652e-06,
"loss": 0.5743,
"step": 880
},
{
"epoch": 0.36667345691586883,
"grad_norm": 6.006564140319824,
"learning_rate": 8.90020646937371e-06,
"loss": 0.6157,
"step": 900
},
{
"epoch": 0.3748217559584437,
"grad_norm": 5.9578471183776855,
"learning_rate": 8.872677219545767e-06,
"loss": 0.6485,
"step": 920
},
{
"epoch": 0.38297005500101855,
"grad_norm": 5.149762153625488,
"learning_rate": 8.845147969717826e-06,
"loss": 0.5951,
"step": 940
},
{
"epoch": 0.3911183540435934,
"grad_norm": 6.143222332000732,
"learning_rate": 8.817618719889883e-06,
"loss": 0.5805,
"step": 960
},
{
"epoch": 0.39926665308616827,
"grad_norm": 5.496342658996582,
"learning_rate": 8.790089470061941e-06,
"loss": 0.6158,
"step": 980
},
{
"epoch": 0.4074149521287431,
"grad_norm": 5.14535665512085,
"learning_rate": 8.762560220234e-06,
"loss": 0.6036,
"step": 1000
},
{
"epoch": 0.415563251171318,
"grad_norm": 5.73581600189209,
"learning_rate": 8.735030970406058e-06,
"loss": 0.5925,
"step": 1020
},
{
"epoch": 0.42371155021389284,
"grad_norm": 4.455753326416016,
"learning_rate": 8.707501720578115e-06,
"loss": 0.5894,
"step": 1040
},
{
"epoch": 0.4318598492564677,
"grad_norm": 6.366560935974121,
"learning_rate": 8.679972470750173e-06,
"loss": 0.6424,
"step": 1060
},
{
"epoch": 0.44000814829904256,
"grad_norm": 5.11430025100708,
"learning_rate": 8.652443220922232e-06,
"loss": 0.5731,
"step": 1080
},
{
"epoch": 0.4481564473416174,
"grad_norm": 6.0588812828063965,
"learning_rate": 8.624913971094288e-06,
"loss": 0.6257,
"step": 1100
},
{
"epoch": 0.4563047463841923,
"grad_norm": 6.879603385925293,
"learning_rate": 8.597384721266347e-06,
"loss": 0.6427,
"step": 1120
},
{
"epoch": 0.4644530454267672,
"grad_norm": 4.6673502922058105,
"learning_rate": 8.569855471438404e-06,
"loss": 0.5763,
"step": 1140
},
{
"epoch": 0.47260134446934204,
"grad_norm": 5.316527843475342,
"learning_rate": 8.542326221610462e-06,
"loss": 0.6207,
"step": 1160
},
{
"epoch": 0.4807496435119169,
"grad_norm": 5.4205780029296875,
"learning_rate": 8.514796971782519e-06,
"loss": 0.5435,
"step": 1180
},
{
"epoch": 0.48889794255449176,
"grad_norm": 4.579510688781738,
"learning_rate": 8.487267721954577e-06,
"loss": 0.5937,
"step": 1200
},
{
"epoch": 0.4970462415970666,
"grad_norm": 6.327101707458496,
"learning_rate": 8.459738472126636e-06,
"loss": 0.6186,
"step": 1220
},
{
"epoch": 0.5051945406396414,
"grad_norm": 5.347973346710205,
"learning_rate": 8.432209222298693e-06,
"loss": 0.564,
"step": 1240
},
{
"epoch": 0.5133428396822164,
"grad_norm": 5.44881010055542,
"learning_rate": 8.404679972470751e-06,
"loss": 0.5221,
"step": 1260
},
{
"epoch": 0.5214911387247912,
"grad_norm": 5.802955150604248,
"learning_rate": 8.377150722642808e-06,
"loss": 0.5893,
"step": 1280
},
{
"epoch": 0.5296394377673661,
"grad_norm": 5.931309223175049,
"learning_rate": 8.349621472814866e-06,
"loss": 0.6306,
"step": 1300
},
{
"epoch": 0.537787736809941,
"grad_norm": 5.793541431427002,
"learning_rate": 8.322092222986923e-06,
"loss": 0.6199,
"step": 1320
},
{
"epoch": 0.5459360358525158,
"grad_norm": 4.972778797149658,
"learning_rate": 8.294562973158982e-06,
"loss": 0.4771,
"step": 1340
},
{
"epoch": 0.5540843348950907,
"grad_norm": 5.573855876922607,
"learning_rate": 8.26703372333104e-06,
"loss": 0.5054,
"step": 1360
},
{
"epoch": 0.5622326339376655,
"grad_norm": 4.7720947265625,
"learning_rate": 8.239504473503099e-06,
"loss": 0.5599,
"step": 1380
},
{
"epoch": 0.5703809329802404,
"grad_norm": 6.338388442993164,
"learning_rate": 8.211975223675155e-06,
"loss": 0.5612,
"step": 1400
},
{
"epoch": 0.5785292320228153,
"grad_norm": 6.1034464836120605,
"learning_rate": 8.184445973847214e-06,
"loss": 0.5662,
"step": 1420
},
{
"epoch": 0.5866775310653901,
"grad_norm": 5.423385143280029,
"learning_rate": 8.15691672401927e-06,
"loss": 0.6246,
"step": 1440
},
{
"epoch": 0.594825830107965,
"grad_norm": 6.507946014404297,
"learning_rate": 8.12938747419133e-06,
"loss": 0.5879,
"step": 1460
},
{
"epoch": 0.6029741291505398,
"grad_norm": 5.927179336547852,
"learning_rate": 8.101858224363388e-06,
"loss": 0.5479,
"step": 1480
},
{
"epoch": 0.6111224281931147,
"grad_norm": 6.775645732879639,
"learning_rate": 8.074328974535444e-06,
"loss": 0.5809,
"step": 1500
},
{
"epoch": 0.6192707272356895,
"grad_norm": 4.263314247131348,
"learning_rate": 8.046799724707503e-06,
"loss": 0.55,
"step": 1520
},
{
"epoch": 0.6274190262782644,
"grad_norm": 4.671892166137695,
"learning_rate": 8.01927047487956e-06,
"loss": 0.529,
"step": 1540
},
{
"epoch": 0.6355673253208393,
"grad_norm": 5.7395501136779785,
"learning_rate": 7.991741225051618e-06,
"loss": 0.5349,
"step": 1560
},
{
"epoch": 0.6437156243634141,
"grad_norm": 5.88397216796875,
"learning_rate": 7.964211975223675e-06,
"loss": 0.5675,
"step": 1580
},
{
"epoch": 0.651863923405989,
"grad_norm": 5.037132263183594,
"learning_rate": 7.936682725395733e-06,
"loss": 0.5838,
"step": 1600
},
{
"epoch": 0.6600122224485638,
"grad_norm": 6.442205429077148,
"learning_rate": 7.909153475567792e-06,
"loss": 0.5275,
"step": 1620
},
{
"epoch": 0.6681605214911387,
"grad_norm": 4.626034259796143,
"learning_rate": 7.881624225739849e-06,
"loss": 0.5251,
"step": 1640
},
{
"epoch": 0.6763088205337136,
"grad_norm": 6.96089506149292,
"learning_rate": 7.854094975911907e-06,
"loss": 0.589,
"step": 1660
},
{
"epoch": 0.6844571195762884,
"grad_norm": 5.303503036499023,
"learning_rate": 7.826565726083964e-06,
"loss": 0.6233,
"step": 1680
},
{
"epoch": 0.6926054186188633,
"grad_norm": 6.015293121337891,
"learning_rate": 7.799036476256022e-06,
"loss": 0.5517,
"step": 1700
},
{
"epoch": 0.7007537176614381,
"grad_norm": 5.348663806915283,
"learning_rate": 7.77150722642808e-06,
"loss": 0.5403,
"step": 1720
},
{
"epoch": 0.708902016704013,
"grad_norm": 4.189513206481934,
"learning_rate": 7.743977976600138e-06,
"loss": 0.5817,
"step": 1740
},
{
"epoch": 0.717050315746588,
"grad_norm": 7.446457386016846,
"learning_rate": 7.716448726772196e-06,
"loss": 0.5437,
"step": 1760
},
{
"epoch": 0.7251986147891628,
"grad_norm": 4.759880542755127,
"learning_rate": 7.688919476944255e-06,
"loss": 0.5648,
"step": 1780
},
{
"epoch": 0.7333469138317377,
"grad_norm": 5.6719584465026855,
"learning_rate": 7.661390227116311e-06,
"loss": 0.6118,
"step": 1800
},
{
"epoch": 0.7414952128743125,
"grad_norm": 4.943815231323242,
"learning_rate": 7.63386097728837e-06,
"loss": 0.5326,
"step": 1820
},
{
"epoch": 0.7496435119168874,
"grad_norm": 3.6254968643188477,
"learning_rate": 7.6063317274604275e-06,
"loss": 0.5087,
"step": 1840
},
{
"epoch": 0.7577918109594622,
"grad_norm": 5.9594526290893555,
"learning_rate": 7.578802477632485e-06,
"loss": 0.532,
"step": 1860
},
{
"epoch": 0.7659401100020371,
"grad_norm": 5.103460311889648,
"learning_rate": 7.551273227804543e-06,
"loss": 0.553,
"step": 1880
},
{
"epoch": 0.774088409044612,
"grad_norm": 6.327749729156494,
"learning_rate": 7.5237439779766004e-06,
"loss": 0.5767,
"step": 1900
},
{
"epoch": 0.7822367080871868,
"grad_norm": 5.8910088539123535,
"learning_rate": 7.496214728148659e-06,
"loss": 0.5948,
"step": 1920
},
{
"epoch": 0.7903850071297617,
"grad_norm": 5.571481227874756,
"learning_rate": 7.4686854783207165e-06,
"loss": 0.5199,
"step": 1940
},
{
"epoch": 0.7985333061723365,
"grad_norm": 4.894184589385986,
"learning_rate": 7.441156228492774e-06,
"loss": 0.5577,
"step": 1960
},
{
"epoch": 0.8066816052149114,
"grad_norm": 5.402436256408691,
"learning_rate": 7.413626978664832e-06,
"loss": 0.5395,
"step": 1980
},
{
"epoch": 0.8148299042574862,
"grad_norm": 5.212170600891113,
"learning_rate": 7.3860977288368894e-06,
"loss": 0.5319,
"step": 2000
},
{
"epoch": 0.8148299042574862,
"eval_loss": 0.4680393934249878,
"eval_runtime": 10851.5751,
"eval_samples_per_second": 1.81,
"eval_steps_per_second": 0.226,
"eval_wer": 38.607193210094984,
"step": 2000
},
{
"epoch": 0.8229782033000611,
"grad_norm": 5.673357963562012,
"learning_rate": 7.358568479008947e-06,
"loss": 0.5641,
"step": 2020
},
{
"epoch": 0.831126502342636,
"grad_norm": 4.529426097869873,
"learning_rate": 7.331039229181005e-06,
"loss": 0.489,
"step": 2040
},
{
"epoch": 0.8392748013852108,
"grad_norm": 4.871108055114746,
"learning_rate": 7.303509979353063e-06,
"loss": 0.5413,
"step": 2060
},
{
"epoch": 0.8474231004277857,
"grad_norm": 4.407359600067139,
"learning_rate": 7.275980729525121e-06,
"loss": 0.4993,
"step": 2080
},
{
"epoch": 0.8555713994703605,
"grad_norm": 4.965864181518555,
"learning_rate": 7.248451479697178e-06,
"loss": 0.5825,
"step": 2100
},
{
"epoch": 0.8637196985129354,
"grad_norm": 6.825056552886963,
"learning_rate": 7.220922229869237e-06,
"loss": 0.5672,
"step": 2120
},
{
"epoch": 0.8718679975555103,
"grad_norm": 5.5648040771484375,
"learning_rate": 7.1933929800412945e-06,
"loss": 0.5188,
"step": 2140
},
{
"epoch": 0.8800162965980851,
"grad_norm": 6.5213799476623535,
"learning_rate": 7.165863730213353e-06,
"loss": 0.5806,
"step": 2160
},
{
"epoch": 0.88816459564066,
"grad_norm": 5.0149993896484375,
"learning_rate": 7.138334480385411e-06,
"loss": 0.5729,
"step": 2180
},
{
"epoch": 0.8963128946832348,
"grad_norm": 4.783459186553955,
"learning_rate": 7.110805230557468e-06,
"loss": 0.4991,
"step": 2200
},
{
"epoch": 0.9044611937258097,
"grad_norm": 5.186647891998291,
"learning_rate": 7.083275980729526e-06,
"loss": 0.6553,
"step": 2220
},
{
"epoch": 0.9126094927683845,
"grad_norm": 4.135760307312012,
"learning_rate": 7.0557467309015835e-06,
"loss": 0.5947,
"step": 2240
},
{
"epoch": 0.9207577918109595,
"grad_norm": 5.20528507232666,
"learning_rate": 7.028217481073641e-06,
"loss": 0.5454,
"step": 2260
},
{
"epoch": 0.9289060908535344,
"grad_norm": 5.948816299438477,
"learning_rate": 7.000688231245699e-06,
"loss": 0.5475,
"step": 2280
},
{
"epoch": 0.9370543898961092,
"grad_norm": 5.592283248901367,
"learning_rate": 6.973158981417757e-06,
"loss": 0.5153,
"step": 2300
},
{
"epoch": 0.9452026889386841,
"grad_norm": 5.162081241607666,
"learning_rate": 6.945629731589815e-06,
"loss": 0.4637,
"step": 2320
},
{
"epoch": 0.953350987981259,
"grad_norm": 5.888420104980469,
"learning_rate": 6.9181004817618725e-06,
"loss": 0.4956,
"step": 2340
},
{
"epoch": 0.9614992870238338,
"grad_norm": 6.354236602783203,
"learning_rate": 6.89057123193393e-06,
"loss": 0.4912,
"step": 2360
},
{
"epoch": 0.9696475860664087,
"grad_norm": 4.269996166229248,
"learning_rate": 6.863041982105988e-06,
"loss": 0.489,
"step": 2380
},
{
"epoch": 0.9777958851089835,
"grad_norm": 5.822803974151611,
"learning_rate": 6.835512732278045e-06,
"loss": 0.55,
"step": 2400
},
{
"epoch": 0.9859441841515584,
"grad_norm": 4.092251777648926,
"learning_rate": 6.807983482450103e-06,
"loss": 0.5625,
"step": 2420
},
{
"epoch": 0.9940924831941332,
"grad_norm": 6.759193420410156,
"learning_rate": 6.7804542326221615e-06,
"loss": 0.5363,
"step": 2440
},
{
"epoch": 1.0020370747606437,
"grad_norm": 5.126402378082275,
"learning_rate": 6.752924982794219e-06,
"loss": 0.535,
"step": 2460
},
{
"epoch": 1.0101853738032185,
"grad_norm": 5.261082172393799,
"learning_rate": 6.725395732966277e-06,
"loss": 0.4262,
"step": 2480
},
{
"epoch": 1.0183336728457935,
"grad_norm": 4.407663345336914,
"learning_rate": 6.697866483138335e-06,
"loss": 0.4076,
"step": 2500
},
{
"epoch": 1.0264819718883682,
"grad_norm": 5.785089492797852,
"learning_rate": 6.670337233310394e-06,
"loss": 0.4255,
"step": 2520
},
{
"epoch": 1.0346302709309432,
"grad_norm": 4.659370422363281,
"learning_rate": 6.642807983482451e-06,
"loss": 0.4116,
"step": 2540
},
{
"epoch": 1.042778569973518,
"grad_norm": 5.880095958709717,
"learning_rate": 6.615278733654509e-06,
"loss": 0.4943,
"step": 2560
},
{
"epoch": 1.050926869016093,
"grad_norm": 4.94119930267334,
"learning_rate": 6.587749483826567e-06,
"loss": 0.3904,
"step": 2580
},
{
"epoch": 1.0590751680586679,
"grad_norm": 4.008668422698975,
"learning_rate": 6.560220233998624e-06,
"loss": 0.403,
"step": 2600
},
{
"epoch": 1.0672234671012426,
"grad_norm": 6.079473495483398,
"learning_rate": 6.532690984170682e-06,
"loss": 0.4833,
"step": 2620
},
{
"epoch": 1.0753717661438176,
"grad_norm": 5.263147830963135,
"learning_rate": 6.5051617343427395e-06,
"loss": 0.4423,
"step": 2640
},
{
"epoch": 1.0835200651863923,
"grad_norm": 4.1324543952941895,
"learning_rate": 6.477632484514798e-06,
"loss": 0.4631,
"step": 2660
},
{
"epoch": 1.0916683642289673,
"grad_norm": 3.648576259613037,
"learning_rate": 6.450103234686856e-06,
"loss": 0.4002,
"step": 2680
},
{
"epoch": 1.099816663271542,
"grad_norm": 5.458010673522949,
"learning_rate": 6.422573984858913e-06,
"loss": 0.4572,
"step": 2700
},
{
"epoch": 1.107964962314117,
"grad_norm": 5.379840850830078,
"learning_rate": 6.395044735030971e-06,
"loss": 0.4283,
"step": 2720
},
{
"epoch": 1.1161132613566918,
"grad_norm": 5.2092108726501465,
"learning_rate": 6.3675154852030285e-06,
"loss": 0.4384,
"step": 2740
},
{
"epoch": 1.1242615603992667,
"grad_norm": 5.582503795623779,
"learning_rate": 6.339986235375086e-06,
"loss": 0.4202,
"step": 2760
},
{
"epoch": 1.1324098594418415,
"grad_norm": 4.186822891235352,
"learning_rate": 6.312456985547144e-06,
"loss": 0.3972,
"step": 2780
},
{
"epoch": 1.1405581584844164,
"grad_norm": 5.029235363006592,
"learning_rate": 6.284927735719201e-06,
"loss": 0.4368,
"step": 2800
},
{
"epoch": 1.1487064575269912,
"grad_norm": 4.080201148986816,
"learning_rate": 6.25739848589126e-06,
"loss": 0.4561,
"step": 2820
},
{
"epoch": 1.1568547565695662,
"grad_norm": 5.171266078948975,
"learning_rate": 6.2298692360633175e-06,
"loss": 0.4594,
"step": 2840
},
{
"epoch": 1.165003055612141,
"grad_norm": 6.01788854598999,
"learning_rate": 6.202339986235376e-06,
"loss": 0.363,
"step": 2860
},
{
"epoch": 1.1731513546547159,
"grad_norm": 5.374858379364014,
"learning_rate": 6.1748107364074336e-06,
"loss": 0.4782,
"step": 2880
},
{
"epoch": 1.1812996536972906,
"grad_norm": 4.696728706359863,
"learning_rate": 6.147281486579492e-06,
"loss": 0.4598,
"step": 2900
},
{
"epoch": 1.1894479527398656,
"grad_norm": 4.785463333129883,
"learning_rate": 6.11975223675155e-06,
"loss": 0.4502,
"step": 2920
},
{
"epoch": 1.1975962517824403,
"grad_norm": 4.824402809143066,
"learning_rate": 6.092222986923607e-06,
"loss": 0.4216,
"step": 2940
},
{
"epoch": 1.2057445508250153,
"grad_norm": 4.554792404174805,
"learning_rate": 6.064693737095665e-06,
"loss": 0.39,
"step": 2960
},
{
"epoch": 1.21389284986759,
"grad_norm": 5.259772300720215,
"learning_rate": 6.0371644872677226e-06,
"loss": 0.4199,
"step": 2980
},
{
"epoch": 1.222041148910165,
"grad_norm": 4.8776140213012695,
"learning_rate": 6.00963523743978e-06,
"loss": 0.4742,
"step": 3000
},
{
"epoch": 1.23018944795274,
"grad_norm": 5.5702033042907715,
"learning_rate": 5.982105987611838e-06,
"loss": 0.3934,
"step": 3020
},
{
"epoch": 1.2383377469953147,
"grad_norm": 5.325311660766602,
"learning_rate": 5.954576737783896e-06,
"loss": 0.3655,
"step": 3040
},
{
"epoch": 1.2464860460378895,
"grad_norm": 3.9275801181793213,
"learning_rate": 5.927047487955954e-06,
"loss": 0.3965,
"step": 3060
},
{
"epoch": 1.2546343450804645,
"grad_norm": 4.71364688873291,
"learning_rate": 5.8995182381280116e-06,
"loss": 0.4026,
"step": 3080
},
{
"epoch": 1.2627826441230394,
"grad_norm": 4.507187843322754,
"learning_rate": 5.871988988300069e-06,
"loss": 0.4004,
"step": 3100
},
{
"epoch": 1.2709309431656142,
"grad_norm": 4.939427375793457,
"learning_rate": 5.844459738472127e-06,
"loss": 0.407,
"step": 3120
},
{
"epoch": 1.279079242208189,
"grad_norm": 5.642360210418701,
"learning_rate": 5.8169304886441844e-06,
"loss": 0.4273,
"step": 3140
},
{
"epoch": 1.2872275412507639,
"grad_norm": 4.531342506408691,
"learning_rate": 5.789401238816242e-06,
"loss": 0.4455,
"step": 3160
},
{
"epoch": 1.2953758402933389,
"grad_norm": 5.926376819610596,
"learning_rate": 5.7618719889883005e-06,
"loss": 0.4627,
"step": 3180
},
{
"epoch": 1.3035241393359136,
"grad_norm": 3.818305730819702,
"learning_rate": 5.734342739160358e-06,
"loss": 0.3957,
"step": 3200
},
{
"epoch": 1.3116724383784886,
"grad_norm": 4.200014114379883,
"learning_rate": 5.706813489332416e-06,
"loss": 0.4516,
"step": 3220
},
{
"epoch": 1.3198207374210633,
"grad_norm": 5.380039691925049,
"learning_rate": 5.679284239504474e-06,
"loss": 0.4775,
"step": 3240
},
{
"epoch": 1.3279690364636383,
"grad_norm": 6.382922172546387,
"learning_rate": 5.651754989676532e-06,
"loss": 0.3792,
"step": 3260
},
{
"epoch": 1.336117335506213,
"grad_norm": 5.670558929443359,
"learning_rate": 5.62422573984859e-06,
"loss": 0.4301,
"step": 3280
},
{
"epoch": 1.344265634548788,
"grad_norm": 4.901285171508789,
"learning_rate": 5.596696490020648e-06,
"loss": 0.4095,
"step": 3300
},
{
"epoch": 1.3524139335913627,
"grad_norm": 5.272710800170898,
"learning_rate": 5.569167240192706e-06,
"loss": 0.3585,
"step": 3320
},
{
"epoch": 1.3605622326339377,
"grad_norm": 5.442037582397461,
"learning_rate": 5.541637990364763e-06,
"loss": 0.4019,
"step": 3340
},
{
"epoch": 1.3687105316765125,
"grad_norm": 4.817733287811279,
"learning_rate": 5.514108740536821e-06,
"loss": 0.368,
"step": 3360
},
{
"epoch": 1.3768588307190874,
"grad_norm": 4.459897994995117,
"learning_rate": 5.4865794907088785e-06,
"loss": 0.3646,
"step": 3380
},
{
"epoch": 1.3850071297616622,
"grad_norm": 5.927896976470947,
"learning_rate": 5.459050240880936e-06,
"loss": 0.4448,
"step": 3400
},
{
"epoch": 1.3931554288042372,
"grad_norm": 5.564925193786621,
"learning_rate": 5.431520991052995e-06,
"loss": 0.3794,
"step": 3420
},
{
"epoch": 1.401303727846812,
"grad_norm": 4.68287467956543,
"learning_rate": 5.403991741225052e-06,
"loss": 0.3931,
"step": 3440
},
{
"epoch": 1.4094520268893869,
"grad_norm": 5.004433631896973,
"learning_rate": 5.37646249139711e-06,
"loss": 0.432,
"step": 3460
},
{
"epoch": 1.4176003259319616,
"grad_norm": 5.418098449707031,
"learning_rate": 5.3489332415691675e-06,
"loss": 0.4533,
"step": 3480
},
{
"epoch": 1.4257486249745366,
"grad_norm": 4.825730800628662,
"learning_rate": 5.321403991741225e-06,
"loss": 0.4159,
"step": 3500
},
{
"epoch": 1.4338969240171116,
"grad_norm": 5.892017364501953,
"learning_rate": 5.293874741913283e-06,
"loss": 0.394,
"step": 3520
},
{
"epoch": 1.4420452230596863,
"grad_norm": 5.600419521331787,
"learning_rate": 5.26634549208534e-06,
"loss": 0.4357,
"step": 3540
},
{
"epoch": 1.450193522102261,
"grad_norm": 4.615939617156982,
"learning_rate": 5.238816242257399e-06,
"loss": 0.4029,
"step": 3560
},
{
"epoch": 1.458341821144836,
"grad_norm": 4.384542465209961,
"learning_rate": 5.2112869924294565e-06,
"loss": 0.4042,
"step": 3580
},
{
"epoch": 1.466490120187411,
"grad_norm": 5.981232643127441,
"learning_rate": 5.183757742601514e-06,
"loss": 0.3923,
"step": 3600
},
{
"epoch": 1.4746384192299857,
"grad_norm": 5.077232837677002,
"learning_rate": 5.156228492773573e-06,
"loss": 0.4995,
"step": 3620
},
{
"epoch": 1.4827867182725605,
"grad_norm": 5.000918865203857,
"learning_rate": 5.12869924294563e-06,
"loss": 0.4167,
"step": 3640
},
{
"epoch": 1.4909350173151354,
"grad_norm": 2.953489303588867,
"learning_rate": 5.101169993117689e-06,
"loss": 0.3798,
"step": 3660
},
{
"epoch": 1.4990833163577104,
"grad_norm": 5.880030632019043,
"learning_rate": 5.073640743289746e-06,
"loss": 0.4349,
"step": 3680
},
{
"epoch": 1.5072316154002852,
"grad_norm": 5.726639270782471,
"learning_rate": 5.046111493461804e-06,
"loss": 0.3809,
"step": 3700
},
{
"epoch": 1.51537991444286,
"grad_norm": 4.755875587463379,
"learning_rate": 5.018582243633862e-06,
"loss": 0.4345,
"step": 3720
},
{
"epoch": 1.5235282134854349,
"grad_norm": 3.825904130935669,
"learning_rate": 4.991052993805919e-06,
"loss": 0.4304,
"step": 3740
},
{
"epoch": 1.5316765125280098,
"grad_norm": 6.1705217361450195,
"learning_rate": 4.963523743977977e-06,
"loss": 0.4224,
"step": 3760
},
{
"epoch": 1.5398248115705846,
"grad_norm": 5.098936557769775,
"learning_rate": 4.9359944941500345e-06,
"loss": 0.4254,
"step": 3780
},
{
"epoch": 1.5479731106131593,
"grad_norm": 4.633610248565674,
"learning_rate": 4.908465244322093e-06,
"loss": 0.3542,
"step": 3800
},
{
"epoch": 1.5561214096557343,
"grad_norm": 5.469082832336426,
"learning_rate": 4.880935994494151e-06,
"loss": 0.3577,
"step": 3820
},
{
"epoch": 1.5642697086983093,
"grad_norm": 4.476110458374023,
"learning_rate": 4.853406744666208e-06,
"loss": 0.3931,
"step": 3840
},
{
"epoch": 1.5724180077408842,
"grad_norm": 4.693641185760498,
"learning_rate": 4.825877494838266e-06,
"loss": 0.3793,
"step": 3860
},
{
"epoch": 1.580566306783459,
"grad_norm": 5.025706768035889,
"learning_rate": 4.7983482450103235e-06,
"loss": 0.4152,
"step": 3880
},
{
"epoch": 1.5887146058260337,
"grad_norm": 5.075003147125244,
"learning_rate": 4.770818995182382e-06,
"loss": 0.3813,
"step": 3900
},
{
"epoch": 1.5968629048686087,
"grad_norm": 4.8126115798950195,
"learning_rate": 4.74328974535444e-06,
"loss": 0.408,
"step": 3920
},
{
"epoch": 1.6050112039111837,
"grad_norm": 4.3969340324401855,
"learning_rate": 4.715760495526497e-06,
"loss": 0.3667,
"step": 3940
},
{
"epoch": 1.6131595029537584,
"grad_norm": 4.514830589294434,
"learning_rate": 4.688231245698555e-06,
"loss": 0.4022,
"step": 3960
},
{
"epoch": 1.6213078019963332,
"grad_norm": 5.9185309410095215,
"learning_rate": 4.6607019958706125e-06,
"loss": 0.4277,
"step": 3980
},
{
"epoch": 1.6294561010389081,
"grad_norm": 4.446202754974365,
"learning_rate": 4.633172746042671e-06,
"loss": 0.4179,
"step": 4000
},
{
"epoch": 1.6294561010389081,
"eval_loss": 0.3749663531780243,
"eval_runtime": 13864.4556,
"eval_samples_per_second": 1.416,
"eval_steps_per_second": 0.177,
"eval_wer": 60.07223542231883,
"step": 4000
}
],
"logging_steps": 20,
"max_steps": 7365,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.69331595329536e+19,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}