{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 180774,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 1.9944682310509254e-05,
"loss": 1.5408,
"step": 500
},
{
"epoch": 0.02,
"learning_rate": 1.988936462101851e-05,
"loss": 1.2376,
"step": 1000
},
{
"epoch": 0.02,
"learning_rate": 1.9834046931527766e-05,
"loss": 1.1307,
"step": 1500
},
{
"epoch": 0.03,
"learning_rate": 1.977872924203702e-05,
"loss": 1.103,
"step": 2000
},
{
"epoch": 0.04,
"learning_rate": 1.9723411552546275e-05,
"loss": 1.0708,
"step": 2500
},
{
"epoch": 0.05,
"learning_rate": 1.9668093863055527e-05,
"loss": 1.0288,
"step": 3000
},
{
"epoch": 0.06,
"learning_rate": 1.9612776173564783e-05,
"loss": 1.0089,
"step": 3500
},
{
"epoch": 0.07,
"learning_rate": 1.955745848407404e-05,
"loss": 1.0096,
"step": 4000
},
{
"epoch": 0.07,
"learning_rate": 1.9502140794583292e-05,
"loss": 1.0213,
"step": 4500
},
{
"epoch": 0.08,
"learning_rate": 1.9446823105092548e-05,
"loss": 0.9962,
"step": 5000
},
{
"epoch": 0.09,
"learning_rate": 1.9391505415601804e-05,
"loss": 0.9709,
"step": 5500
},
{
"epoch": 0.1,
"learning_rate": 1.9336187726111056e-05,
"loss": 0.9898,
"step": 6000
},
{
"epoch": 0.11,
"learning_rate": 1.9280870036620312e-05,
"loss": 0.9536,
"step": 6500
},
{
"epoch": 0.12,
"learning_rate": 1.9225552347129565e-05,
"loss": 0.9591,
"step": 7000
},
{
"epoch": 0.12,
"learning_rate": 1.917023465763882e-05,
"loss": 0.9851,
"step": 7500
},
{
"epoch": 0.13,
"learning_rate": 1.9114916968148077e-05,
"loss": 0.9548,
"step": 8000
},
{
"epoch": 0.14,
"learning_rate": 1.905959927865733e-05,
"loss": 1.0179,
"step": 8500
},
{
"epoch": 0.15,
"learning_rate": 1.9004281589166585e-05,
"loss": 0.9737,
"step": 9000
},
{
"epoch": 0.16,
"learning_rate": 1.894896389967584e-05,
"loss": 0.9592,
"step": 9500
},
{
"epoch": 0.17,
"learning_rate": 1.8893646210185094e-05,
"loss": 0.9488,
"step": 10000
},
{
"epoch": 0.17,
"learning_rate": 1.883832852069435e-05,
"loss": 0.9598,
"step": 10500
},
{
"epoch": 0.18,
"learning_rate": 1.8783010831203603e-05,
"loss": 0.9572,
"step": 11000
},
{
"epoch": 0.19,
"learning_rate": 1.872769314171286e-05,
"loss": 0.9789,
"step": 11500
},
{
"epoch": 0.2,
"learning_rate": 1.8672375452222115e-05,
"loss": 0.9503,
"step": 12000
},
{
"epoch": 0.21,
"learning_rate": 1.8617057762731367e-05,
"loss": 0.9296,
"step": 12500
},
{
"epoch": 0.22,
"learning_rate": 1.8561740073240623e-05,
"loss": 0.933,
"step": 13000
},
{
"epoch": 0.22,
"learning_rate": 1.850642238374988e-05,
"loss": 0.9438,
"step": 13500
},
{
"epoch": 0.23,
"learning_rate": 1.845110469425913e-05,
"loss": 0.919,
"step": 14000
},
{
"epoch": 0.24,
"learning_rate": 1.8395787004768388e-05,
"loss": 0.9704,
"step": 14500
},
{
"epoch": 0.25,
"learning_rate": 1.834046931527764e-05,
"loss": 0.9099,
"step": 15000
},
{
"epoch": 0.26,
"learning_rate": 1.8285151625786896e-05,
"loss": 0.9517,
"step": 15500
},
{
"epoch": 0.27,
"learning_rate": 1.8229833936296152e-05,
"loss": 0.9463,
"step": 16000
},
{
"epoch": 0.27,
"learning_rate": 1.8174516246805405e-05,
"loss": 0.9535,
"step": 16500
},
{
"epoch": 0.28,
"learning_rate": 1.8119198557314657e-05,
"loss": 0.9372,
"step": 17000
},
{
"epoch": 0.29,
"learning_rate": 1.8063880867823917e-05,
"loss": 0.9515,
"step": 17500
},
{
"epoch": 0.3,
"learning_rate": 1.800856317833317e-05,
"loss": 0.9675,
"step": 18000
},
{
"epoch": 0.31,
"learning_rate": 1.7953245488842422e-05,
"loss": 0.9579,
"step": 18500
},
{
"epoch": 0.32,
"learning_rate": 1.7897927799351678e-05,
"loss": 0.961,
"step": 19000
},
{
"epoch": 0.32,
"learning_rate": 1.7842610109860934e-05,
"loss": 0.9397,
"step": 19500
},
{
"epoch": 0.33,
"learning_rate": 1.7787292420370186e-05,
"loss": 0.9518,
"step": 20000
},
{
"epoch": 0.34,
"learning_rate": 1.7731974730879442e-05,
"loss": 0.9745,
"step": 20500
},
{
"epoch": 0.35,
"learning_rate": 1.7676657041388695e-05,
"loss": 0.8979,
"step": 21000
},
{
"epoch": 0.36,
"learning_rate": 1.762133935189795e-05,
"loss": 0.9224,
"step": 21500
},
{
"epoch": 0.37,
"learning_rate": 1.7566021662407207e-05,
"loss": 0.9556,
"step": 22000
},
{
"epoch": 0.37,
"learning_rate": 1.751070397291646e-05,
"loss": 0.978,
"step": 22500
},
{
"epoch": 0.38,
"learning_rate": 1.7455386283425715e-05,
"loss": 0.9102,
"step": 23000
},
{
"epoch": 0.39,
"learning_rate": 1.7400068593934968e-05,
"loss": 0.9124,
"step": 23500
},
{
"epoch": 0.4,
"learning_rate": 1.7344750904444224e-05,
"loss": 0.9089,
"step": 24000
},
{
"epoch": 0.41,
"learning_rate": 1.728943321495348e-05,
"loss": 0.9296,
"step": 24500
},
{
"epoch": 0.41,
"learning_rate": 1.7234115525462733e-05,
"loss": 0.9406,
"step": 25000
},
{
"epoch": 0.42,
"learning_rate": 1.717879783597199e-05,
"loss": 0.9625,
"step": 25500
},
{
"epoch": 0.43,
"learning_rate": 1.7123480146481245e-05,
"loss": 0.9196,
"step": 26000
},
{
"epoch": 0.44,
"learning_rate": 1.7068162456990497e-05,
"loss": 0.9402,
"step": 26500
},
{
"epoch": 0.45,
"learning_rate": 1.7012844767499753e-05,
"loss": 0.977,
"step": 27000
},
{
"epoch": 0.46,
"learning_rate": 1.6957527078009006e-05,
"loss": 0.9082,
"step": 27500
},
{
"epoch": 0.46,
"learning_rate": 1.690220938851826e-05,
"loss": 0.937,
"step": 28000
},
{
"epoch": 0.47,
"learning_rate": 1.6846891699027518e-05,
"loss": 0.8986,
"step": 28500
},
{
"epoch": 0.48,
"learning_rate": 1.679157400953677e-05,
"loss": 0.9493,
"step": 29000
},
{
"epoch": 0.49,
"learning_rate": 1.6736256320046026e-05,
"loss": 0.9245,
"step": 29500
},
{
"epoch": 0.5,
"learning_rate": 1.6680938630555282e-05,
"loss": 0.8917,
"step": 30000
},
{
"epoch": 0.51,
"learning_rate": 1.6625620941064535e-05,
"loss": 0.9436,
"step": 30500
},
{
"epoch": 0.51,
"learning_rate": 1.657030325157379e-05,
"loss": 0.9,
"step": 31000
},
{
"epoch": 0.52,
"learning_rate": 1.6514985562083043e-05,
"loss": 0.9481,
"step": 31500
},
{
"epoch": 0.53,
"learning_rate": 1.64596678725923e-05,
"loss": 0.9146,
"step": 32000
},
{
"epoch": 0.54,
"learning_rate": 1.6404350183101555e-05,
"loss": 0.9303,
"step": 32500
},
{
"epoch": 0.55,
"learning_rate": 1.6349032493610808e-05,
"loss": 0.9305,
"step": 33000
},
{
"epoch": 0.56,
"learning_rate": 1.629371480412006e-05,
"loss": 0.9222,
"step": 33500
},
{
"epoch": 0.56,
"learning_rate": 1.623839711462932e-05,
"loss": 0.9495,
"step": 34000
},
{
"epoch": 0.57,
"learning_rate": 1.6183079425138572e-05,
"loss": 0.8726,
"step": 34500
},
{
"epoch": 0.58,
"learning_rate": 1.6127761735647825e-05,
"loss": 0.9479,
"step": 35000
},
{
"epoch": 0.59,
"learning_rate": 1.607244404615708e-05,
"loss": 0.9395,
"step": 35500
},
{
"epoch": 0.6,
"learning_rate": 1.6017126356666337e-05,
"loss": 0.9261,
"step": 36000
},
{
"epoch": 0.61,
"learning_rate": 1.596180866717559e-05,
"loss": 0.8737,
"step": 36500
},
{
"epoch": 0.61,
"learning_rate": 1.5906490977684845e-05,
"loss": 0.91,
"step": 37000
},
{
"epoch": 0.62,
"learning_rate": 1.5851173288194098e-05,
"loss": 0.9309,
"step": 37500
},
{
"epoch": 0.63,
"learning_rate": 1.5795855598703354e-05,
"loss": 0.9278,
"step": 38000
},
{
"epoch": 0.64,
"learning_rate": 1.574053790921261e-05,
"loss": 0.9506,
"step": 38500
},
{
"epoch": 0.65,
"learning_rate": 1.5685220219721863e-05,
"loss": 0.8787,
"step": 39000
},
{
"epoch": 0.66,
"learning_rate": 1.562990253023112e-05,
"loss": 0.9097,
"step": 39500
},
{
"epoch": 0.66,
"learning_rate": 1.557458484074037e-05,
"loss": 0.8812,
"step": 40000
},
{
"epoch": 0.67,
"learning_rate": 1.5519267151249627e-05,
"loss": 0.8798,
"step": 40500
},
{
"epoch": 0.68,
"learning_rate": 1.5463949461758883e-05,
"loss": 0.9398,
"step": 41000
},
{
"epoch": 0.69,
"learning_rate": 1.5408631772268136e-05,
"loss": 0.9148,
"step": 41500
},
{
"epoch": 0.7,
"learning_rate": 1.535331408277739e-05,
"loss": 0.9398,
"step": 42000
},
{
"epoch": 0.71,
"learning_rate": 1.5297996393286648e-05,
"loss": 0.8984,
"step": 42500
},
{
"epoch": 0.71,
"learning_rate": 1.5242678703795902e-05,
"loss": 0.8947,
"step": 43000
},
{
"epoch": 0.72,
"learning_rate": 1.5187361014305155e-05,
"loss": 0.8767,
"step": 43500
},
{
"epoch": 0.73,
"learning_rate": 1.5132043324814409e-05,
"loss": 0.906,
"step": 44000
},
{
"epoch": 0.74,
"learning_rate": 1.5076725635323666e-05,
"loss": 0.9188,
"step": 44500
},
{
"epoch": 0.75,
"learning_rate": 1.5021407945832919e-05,
"loss": 0.9534,
"step": 45000
},
{
"epoch": 0.76,
"learning_rate": 1.4966090256342173e-05,
"loss": 0.913,
"step": 45500
},
{
"epoch": 0.76,
"learning_rate": 1.4910772566851428e-05,
"loss": 0.8824,
"step": 46000
},
{
"epoch": 0.77,
"learning_rate": 1.4855454877360684e-05,
"loss": 0.9155,
"step": 46500
},
{
"epoch": 0.78,
"learning_rate": 1.4800137187869938e-05,
"loss": 0.9142,
"step": 47000
},
{
"epoch": 0.79,
"learning_rate": 1.4744819498379192e-05,
"loss": 0.9195,
"step": 47500
},
{
"epoch": 0.8,
"learning_rate": 1.4689501808888446e-05,
"loss": 0.9207,
"step": 48000
},
{
"epoch": 0.8,
"learning_rate": 1.4634184119397702e-05,
"loss": 0.9057,
"step": 48500
},
{
"epoch": 0.81,
"learning_rate": 1.4578866429906957e-05,
"loss": 0.9273,
"step": 49000
},
{
"epoch": 0.82,
"learning_rate": 1.4523548740416211e-05,
"loss": 0.9333,
"step": 49500
},
{
"epoch": 0.83,
"learning_rate": 1.4468231050925465e-05,
"loss": 0.8996,
"step": 50000
},
{
"epoch": 0.84,
"learning_rate": 1.4412913361434721e-05,
"loss": 0.9298,
"step": 50500
},
{
"epoch": 0.85,
"learning_rate": 1.4357595671943975e-05,
"loss": 0.933,
"step": 51000
},
{
"epoch": 0.85,
"learning_rate": 1.430227798245323e-05,
"loss": 0.8895,
"step": 51500
},
{
"epoch": 0.86,
"learning_rate": 1.4246960292962484e-05,
"loss": 0.9297,
"step": 52000
},
{
"epoch": 0.87,
"learning_rate": 1.419164260347174e-05,
"loss": 0.9256,
"step": 52500
},
{
"epoch": 0.88,
"learning_rate": 1.4136324913980994e-05,
"loss": 0.9088,
"step": 53000
},
{
"epoch": 0.89,
"learning_rate": 1.4081007224490249e-05,
"loss": 0.9162,
"step": 53500
},
{
"epoch": 0.9,
"learning_rate": 1.4025689534999503e-05,
"loss": 0.9249,
"step": 54000
},
{
"epoch": 0.9,
"learning_rate": 1.3970371845508759e-05,
"loss": 0.9561,
"step": 54500
},
{
"epoch": 0.91,
"learning_rate": 1.3915054156018013e-05,
"loss": 0.9376,
"step": 55000
},
{
"epoch": 0.92,
"learning_rate": 1.3859736466527267e-05,
"loss": 0.9404,
"step": 55500
},
{
"epoch": 0.93,
"learning_rate": 1.3804418777036522e-05,
"loss": 0.9654,
"step": 56000
},
{
"epoch": 0.94,
"learning_rate": 1.3749101087545778e-05,
"loss": 0.8642,
"step": 56500
},
{
"epoch": 0.95,
"learning_rate": 1.3693783398055032e-05,
"loss": 0.9651,
"step": 57000
},
{
"epoch": 0.95,
"learning_rate": 1.3638465708564286e-05,
"loss": 0.928,
"step": 57500
},
{
"epoch": 0.96,
"learning_rate": 1.358314801907354e-05,
"loss": 0.9215,
"step": 58000
},
{
"epoch": 0.97,
"learning_rate": 1.3527830329582796e-05,
"loss": 0.8917,
"step": 58500
},
{
"epoch": 0.98,
"learning_rate": 1.347251264009205e-05,
"loss": 0.8971,
"step": 59000
},
{
"epoch": 0.99,
"learning_rate": 1.3417194950601305e-05,
"loss": 0.9151,
"step": 59500
},
{
"epoch": 1.0,
"learning_rate": 1.336187726111056e-05,
"loss": 0.8999,
"step": 60000
},
{
"epoch": 1.0,
"learning_rate": 1.3306559571619812e-05,
"loss": 0.9411,
"step": 60500
},
{
"epoch": 1.01,
"learning_rate": 1.325124188212907e-05,
"loss": 0.9059,
"step": 61000
},
{
"epoch": 1.02,
"learning_rate": 1.3195924192638324e-05,
"loss": 0.8741,
"step": 61500
},
{
"epoch": 1.03,
"learning_rate": 1.3140606503147576e-05,
"loss": 0.93,
"step": 62000
},
{
"epoch": 1.04,
"learning_rate": 1.308528881365683e-05,
"loss": 0.9157,
"step": 62500
},
{
"epoch": 1.05,
"learning_rate": 1.3029971124166088e-05,
"loss": 0.8834,
"step": 63000
},
{
"epoch": 1.05,
"learning_rate": 1.2974653434675341e-05,
"loss": 0.9047,
"step": 63500
},
{
"epoch": 1.06,
"learning_rate": 1.2919335745184595e-05,
"loss": 0.9396,
"step": 64000
},
{
"epoch": 1.07,
"learning_rate": 1.286401805569385e-05,
"loss": 0.9448,
"step": 64500
},
{
"epoch": 1.08,
"learning_rate": 1.2808700366203106e-05,
"loss": 0.9332,
"step": 65000
},
{
"epoch": 1.09,
"learning_rate": 1.275338267671236e-05,
"loss": 0.9394,
"step": 65500
},
{
"epoch": 1.1,
"learning_rate": 1.2698064987221614e-05,
"loss": 0.9217,
"step": 66000
},
{
"epoch": 1.1,
"learning_rate": 1.2642747297730868e-05,
"loss": 0.893,
"step": 66500
},
{
"epoch": 1.11,
"learning_rate": 1.2587429608240124e-05,
"loss": 0.8987,
"step": 67000
},
{
"epoch": 1.12,
"learning_rate": 1.2532111918749379e-05,
"loss": 0.8831,
"step": 67500
},
{
"epoch": 1.13,
"learning_rate": 1.2476794229258633e-05,
"loss": 0.9192,
"step": 68000
},
{
"epoch": 1.14,
"learning_rate": 1.2421476539767887e-05,
"loss": 0.9175,
"step": 68500
},
{
"epoch": 1.15,
"learning_rate": 1.2366158850277143e-05,
"loss": 0.9207,
"step": 69000
},
{
"epoch": 1.15,
"learning_rate": 1.2310841160786397e-05,
"loss": 0.8792,
"step": 69500
},
{
"epoch": 1.16,
"learning_rate": 1.2255523471295652e-05,
"loss": 0.9052,
"step": 70000
},
{
"epoch": 1.17,
"learning_rate": 1.2200205781804906e-05,
"loss": 0.9362,
"step": 70500
},
{
"epoch": 1.18,
"learning_rate": 1.2144888092314162e-05,
"loss": 0.8689,
"step": 71000
},
{
"epoch": 1.19,
"learning_rate": 1.2089570402823416e-05,
"loss": 0.8964,
"step": 71500
},
{
"epoch": 1.19,
"learning_rate": 1.203425271333267e-05,
"loss": 0.9057,
"step": 72000
},
{
"epoch": 1.2,
"learning_rate": 1.1978935023841925e-05,
"loss": 0.9527,
"step": 72500
},
{
"epoch": 1.21,
"learning_rate": 1.192361733435118e-05,
"loss": 0.9061,
"step": 73000
},
{
"epoch": 1.22,
"learning_rate": 1.1868299644860435e-05,
"loss": 0.9662,
"step": 73500
},
{
"epoch": 1.23,
"learning_rate": 1.181298195536969e-05,
"loss": 0.9286,
"step": 74000
},
{
"epoch": 1.24,
"learning_rate": 1.1757664265878944e-05,
"loss": 0.953,
"step": 74500
},
{
"epoch": 1.24,
"learning_rate": 1.17023465763882e-05,
"loss": 0.9109,
"step": 75000
},
{
"epoch": 1.25,
"learning_rate": 1.1647028886897454e-05,
"loss": 0.9167,
"step": 75500
},
{
"epoch": 1.26,
"learning_rate": 1.1591711197406708e-05,
"loss": 0.9382,
"step": 76000
},
{
"epoch": 1.27,
"learning_rate": 1.1536393507915962e-05,
"loss": 0.9144,
"step": 76500
},
{
"epoch": 1.28,
"learning_rate": 1.1481075818425218e-05,
"loss": 0.8935,
"step": 77000
},
{
"epoch": 1.29,
"learning_rate": 1.1425758128934473e-05,
"loss": 0.9181,
"step": 77500
},
{
"epoch": 1.29,
"learning_rate": 1.1370440439443727e-05,
"loss": 0.9167,
"step": 78000
},
{
"epoch": 1.3,
"learning_rate": 1.131512274995298e-05,
"loss": 0.8912,
"step": 78500
},
{
"epoch": 1.31,
"learning_rate": 1.1259805060462234e-05,
"loss": 0.9078,
"step": 79000
},
{
"epoch": 1.32,
"learning_rate": 1.1204487370971491e-05,
"loss": 0.9164,
"step": 79500
},
{
"epoch": 1.33,
"learning_rate": 1.1149169681480744e-05,
"loss": 0.9178,
"step": 80000
},
{
"epoch": 1.34,
"learning_rate": 1.1093851991989998e-05,
"loss": 0.8802,
"step": 80500
},
{
"epoch": 1.34,
"learning_rate": 1.1038534302499253e-05,
"loss": 0.8861,
"step": 81000
},
{
"epoch": 1.35,
"learning_rate": 1.0983216613008509e-05,
"loss": 0.9514,
"step": 81500
},
{
"epoch": 1.36,
"learning_rate": 1.0927898923517763e-05,
"loss": 0.9349,
"step": 82000
},
{
"epoch": 1.37,
"learning_rate": 1.0872581234027017e-05,
"loss": 0.8864,
"step": 82500
},
{
"epoch": 1.38,
"learning_rate": 1.0817263544536271e-05,
"loss": 0.9031,
"step": 83000
},
{
"epoch": 1.39,
"learning_rate": 1.0761945855045527e-05,
"loss": 0.9202,
"step": 83500
},
{
"epoch": 1.39,
"learning_rate": 1.0706628165554782e-05,
"loss": 0.964,
"step": 84000
},
{
"epoch": 1.4,
"learning_rate": 1.0651310476064036e-05,
"loss": 0.8934,
"step": 84500
},
{
"epoch": 1.41,
"learning_rate": 1.059599278657329e-05,
"loss": 0.8834,
"step": 85000
},
{
"epoch": 1.42,
"learning_rate": 1.0540675097082546e-05,
"loss": 0.9234,
"step": 85500
},
{
"epoch": 1.43,
"learning_rate": 1.04853574075918e-05,
"loss": 0.9243,
"step": 86000
},
{
"epoch": 1.44,
"learning_rate": 1.0430039718101055e-05,
"loss": 0.9032,
"step": 86500
},
{
"epoch": 1.44,
"learning_rate": 1.0374722028610309e-05,
"loss": 0.8856,
"step": 87000
},
{
"epoch": 1.45,
"learning_rate": 1.0319404339119565e-05,
"loss": 0.918,
"step": 87500
},
{
"epoch": 1.46,
"learning_rate": 1.026408664962882e-05,
"loss": 0.9737,
"step": 88000
},
{
"epoch": 1.47,
"learning_rate": 1.0208768960138074e-05,
"loss": 0.8813,
"step": 88500
},
{
"epoch": 1.48,
"learning_rate": 1.0153451270647328e-05,
"loss": 0.8957,
"step": 89000
},
{
"epoch": 1.49,
"learning_rate": 1.0098133581156584e-05,
"loss": 0.9095,
"step": 89500
},
{
"epoch": 1.49,
"learning_rate": 1.0042815891665838e-05,
"loss": 0.9336,
"step": 90000
},
{
"epoch": 1.5,
"learning_rate": 9.987498202175092e-06,
"loss": 0.8921,
"step": 90500
},
{
"epoch": 1.51,
"learning_rate": 9.932180512684347e-06,
"loss": 0.8593,
"step": 91000
},
{
"epoch": 1.52,
"learning_rate": 9.876862823193601e-06,
"loss": 0.9258,
"step": 91500
},
{
"epoch": 1.53,
"learning_rate": 9.821545133702857e-06,
"loss": 0.927,
"step": 92000
},
{
"epoch": 1.54,
"learning_rate": 9.766227444212111e-06,
"loss": 0.9453,
"step": 92500
},
{
"epoch": 1.54,
"learning_rate": 9.710909754721366e-06,
"loss": 0.9271,
"step": 93000
},
{
"epoch": 1.55,
"learning_rate": 9.65559206523062e-06,
"loss": 0.9393,
"step": 93500
},
{
"epoch": 1.56,
"learning_rate": 9.600274375739876e-06,
"loss": 0.9376,
"step": 94000
},
{
"epoch": 1.57,
"learning_rate": 9.54495668624913e-06,
"loss": 0.929,
"step": 94500
},
{
"epoch": 1.58,
"learning_rate": 9.489638996758384e-06,
"loss": 0.9438,
"step": 95000
},
{
"epoch": 1.58,
"learning_rate": 9.434321307267639e-06,
"loss": 0.9369,
"step": 95500
},
{
"epoch": 1.59,
"learning_rate": 9.379003617776895e-06,
"loss": 0.9364,
"step": 96000
},
{
"epoch": 1.6,
"learning_rate": 9.323685928286149e-06,
"loss": 0.9375,
"step": 96500
},
{
"epoch": 1.61,
"learning_rate": 9.268368238795403e-06,
"loss": 0.8816,
"step": 97000
},
{
"epoch": 1.62,
"learning_rate": 9.213050549304657e-06,
"loss": 0.916,
"step": 97500
},
{
"epoch": 1.63,
"learning_rate": 9.157732859813913e-06,
"loss": 0.994,
"step": 98000
},
{
"epoch": 1.63,
"learning_rate": 9.102415170323166e-06,
"loss": 0.9202,
"step": 98500
},
{
"epoch": 1.64,
"learning_rate": 9.04709748083242e-06,
"loss": 0.9044,
"step": 99000
},
{
"epoch": 1.65,
"learning_rate": 8.991779791341676e-06,
"loss": 0.8644,
"step": 99500
},
{
"epoch": 1.66,
"learning_rate": 8.93646210185093e-06,
"loss": 0.8926,
"step": 100000
},
{
"epoch": 1.67,
"learning_rate": 8.881144412360185e-06,
"loss": 0.9647,
"step": 100500
},
{
"epoch": 1.68,
"learning_rate": 8.825826722869439e-06,
"loss": 0.9277,
"step": 101000
},
{
"epoch": 1.68,
"learning_rate": 8.770509033378695e-06,
"loss": 0.8776,
"step": 101500
},
{
"epoch": 1.69,
"learning_rate": 8.71519134388795e-06,
"loss": 0.8863,
"step": 102000
},
{
"epoch": 1.7,
"learning_rate": 8.659873654397204e-06,
"loss": 0.9286,
"step": 102500
},
{
"epoch": 1.71,
"learning_rate": 8.604555964906458e-06,
"loss": 0.9329,
"step": 103000
},
{
"epoch": 1.72,
"learning_rate": 8.549238275415714e-06,
"loss": 0.9091,
"step": 103500
},
{
"epoch": 1.73,
"learning_rate": 8.493920585924968e-06,
"loss": 0.9215,
"step": 104000
},
{
"epoch": 1.73,
"learning_rate": 8.438602896434222e-06,
"loss": 0.9013,
"step": 104500
},
{
"epoch": 1.74,
"learning_rate": 8.383285206943477e-06,
"loss": 0.9064,
"step": 105000
},
{
"epoch": 1.75,
"learning_rate": 8.327967517452733e-06,
"loss": 0.8798,
"step": 105500
},
{
"epoch": 1.76,
"learning_rate": 8.272649827961985e-06,
"loss": 0.9142,
"step": 106000
},
{
"epoch": 1.77,
"learning_rate": 8.217332138471241e-06,
"loss": 0.9415,
"step": 106500
},
{
"epoch": 1.78,
"learning_rate": 8.162014448980496e-06,
"loss": 0.8729,
"step": 107000
},
{
"epoch": 1.78,
"learning_rate": 8.10669675948975e-06,
"loss": 0.9351,
"step": 107500
},
{
"epoch": 1.79,
"learning_rate": 8.051379069999004e-06,
"loss": 0.9186,
"step": 108000
},
{
"epoch": 1.8,
"learning_rate": 7.99606138050826e-06,
"loss": 0.9713,
"step": 108500
},
{
"epoch": 1.81,
"learning_rate": 7.940743691017514e-06,
"loss": 0.9179,
"step": 109000
},
{
"epoch": 1.82,
"learning_rate": 7.885426001526769e-06,
"loss": 0.9631,
"step": 109500
},
{
"epoch": 1.83,
"learning_rate": 7.830108312036023e-06,
"loss": 0.891,
"step": 110000
},
{
"epoch": 1.83,
"learning_rate": 7.774790622545279e-06,
"loss": 0.9189,
"step": 110500
},
{
"epoch": 1.84,
"learning_rate": 7.719472933054533e-06,
"loss": 0.8631,
"step": 111000
},
{
"epoch": 1.85,
"learning_rate": 7.664155243563787e-06,
"loss": 0.9599,
"step": 111500
},
{
"epoch": 1.86,
"learning_rate": 7.608837554073042e-06,
"loss": 0.9375,
"step": 112000
},
{
"epoch": 1.87,
"learning_rate": 7.553519864582297e-06,
"loss": 0.921,
"step": 112500
},
{
"epoch": 1.88,
"learning_rate": 7.498202175091551e-06,
"loss": 0.8958,
"step": 113000
},
{
"epoch": 1.88,
"learning_rate": 7.442884485600806e-06,
"loss": 0.9505,
"step": 113500
},
{
"epoch": 1.89,
"learning_rate": 7.3875667961100605e-06,
"loss": 0.9405,
"step": 114000
},
{
"epoch": 1.9,
"learning_rate": 7.332249106619316e-06,
"loss": 0.8996,
"step": 114500
},
{
"epoch": 1.91,
"learning_rate": 7.27693141712857e-06,
"loss": 0.8697,
"step": 115000
},
{
"epoch": 1.92,
"learning_rate": 7.221613727637825e-06,
"loss": 0.9324,
"step": 115500
},
{
"epoch": 1.93,
"learning_rate": 7.166296038147079e-06,
"loss": 0.8865,
"step": 116000
},
{
"epoch": 1.93,
"learning_rate": 7.1109783486563345e-06,
"loss": 0.9143,
"step": 116500
},
{
"epoch": 1.94,
"learning_rate": 7.055660659165589e-06,
"loss": 0.8974,
"step": 117000
},
{
"epoch": 1.95,
"learning_rate": 7.000342969674844e-06,
"loss": 0.926,
"step": 117500
},
{
"epoch": 1.96,
"learning_rate": 6.945025280184098e-06,
"loss": 0.9321,
"step": 118000
},
{
"epoch": 1.97,
"learning_rate": 6.889707590693352e-06,
"loss": 0.9175,
"step": 118500
},
{
"epoch": 1.97,
"learning_rate": 6.834389901202607e-06,
"loss": 0.9066,
"step": 119000
},
{
"epoch": 1.98,
"learning_rate": 6.779072211711861e-06,
"loss": 0.9149,
"step": 119500
},
{
"epoch": 1.99,
"learning_rate": 6.723754522221116e-06,
"loss": 0.9249,
"step": 120000
},
{
"epoch": 2.0,
"learning_rate": 6.66843683273037e-06,
"loss": 0.8616,
"step": 120500
},
{
"epoch": 2.01,
"learning_rate": 6.6131191432396255e-06,
"loss": 0.933,
"step": 121000
},
{
"epoch": 2.02,
"learning_rate": 6.55780145374888e-06,
"loss": 0.9157,
"step": 121500
},
{
"epoch": 2.02,
"learning_rate": 6.502483764258135e-06,
"loss": 0.924,
"step": 122000
},
{
"epoch": 2.03,
"learning_rate": 6.447166074767389e-06,
"loss": 0.8637,
"step": 122500
},
{
"epoch": 2.04,
"learning_rate": 6.391848385276644e-06,
"loss": 0.9215,
"step": 123000
},
{
"epoch": 2.05,
"learning_rate": 6.336530695785899e-06,
"loss": 0.9098,
"step": 123500
},
{
"epoch": 2.06,
"learning_rate": 6.281213006295154e-06,
"loss": 0.921,
"step": 124000
},
{
"epoch": 2.07,
"learning_rate": 6.225895316804408e-06,
"loss": 0.8698,
"step": 124500
},
{
"epoch": 2.07,
"learning_rate": 6.170577627313663e-06,
"loss": 0.9047,
"step": 125000
},
{
"epoch": 2.08,
"learning_rate": 6.1152599378229175e-06,
"loss": 0.9108,
"step": 125500
},
{
"epoch": 2.09,
"learning_rate": 6.059942248332173e-06,
"loss": 0.8666,
"step": 126000
},
{
"epoch": 2.1,
"learning_rate": 6.004624558841427e-06,
"loss": 0.909,
"step": 126500
},
{
"epoch": 2.11,
"learning_rate": 5.949306869350682e-06,
"loss": 0.9238,
"step": 127000
},
{
"epoch": 2.12,
"learning_rate": 5.8939891798599354e-06,
"loss": 0.8946,
"step": 127500
},
{
"epoch": 2.12,
"learning_rate": 5.838671490369191e-06,
"loss": 0.9025,
"step": 128000
},
{
"epoch": 2.13,
"learning_rate": 5.783353800878445e-06,
"loss": 0.9355,
"step": 128500
},
{
"epoch": 2.14,
"learning_rate": 5.7280361113877e-06,
"loss": 0.8881,
"step": 129000
},
{
"epoch": 2.15,
"learning_rate": 5.672718421896954e-06,
"loss": 0.9549,
"step": 129500
},
{
"epoch": 2.16,
"learning_rate": 5.617400732406209e-06,
"loss": 0.9646,
"step": 130000
},
{
"epoch": 2.17,
"learning_rate": 5.562083042915464e-06,
"loss": 0.9239,
"step": 130500
},
{
"epoch": 2.17,
"learning_rate": 5.506765353424719e-06,
"loss": 0.9045,
"step": 131000
},
{
"epoch": 2.18,
"learning_rate": 5.451447663933973e-06,
"loss": 0.9572,
"step": 131500
},
{
"epoch": 2.19,
"learning_rate": 5.396129974443228e-06,
"loss": 0.8585,
"step": 132000
},
{
"epoch": 2.2,
"learning_rate": 5.3408122849524825e-06,
"loss": 0.9026,
"step": 132500
},
{
"epoch": 2.21,
"learning_rate": 5.285494595461738e-06,
"loss": 0.9043,
"step": 133000
},
{
"epoch": 2.22,
"learning_rate": 5.230176905970992e-06,
"loss": 0.9234,
"step": 133500
},
{
"epoch": 2.22,
"learning_rate": 5.174859216480247e-06,
"loss": 0.8722,
"step": 134000
},
{
"epoch": 2.23,
"learning_rate": 5.119541526989501e-06,
"loss": 0.9212,
"step": 134500
},
{
"epoch": 2.24,
"learning_rate": 5.064223837498756e-06,
"loss": 0.8981,
"step": 135000
},
{
"epoch": 2.25,
"learning_rate": 5.008906148008011e-06,
"loss": 0.9532,
"step": 135500
},
{
"epoch": 2.26,
"learning_rate": 4.953588458517265e-06,
"loss": 0.9129,
"step": 136000
},
{
"epoch": 2.27,
"learning_rate": 4.898270769026519e-06,
"loss": 0.9075,
"step": 136500
},
{
"epoch": 2.27,
"learning_rate": 4.842953079535774e-06,
"loss": 0.9206,
"step": 137000
},
{
"epoch": 2.28,
"learning_rate": 4.787635390045029e-06,
"loss": 0.8512,
"step": 137500
},
{
"epoch": 2.29,
"learning_rate": 4.732317700554284e-06,
"loss": 0.9086,
"step": 138000
},
{
"epoch": 2.3,
"learning_rate": 4.677000011063538e-06,
"loss": 0.8828,
"step": 138500
},
{
"epoch": 2.31,
"learning_rate": 4.621682321572793e-06,
"loss": 0.9071,
"step": 139000
},
{
"epoch": 2.32,
"learning_rate": 4.5663646320820475e-06,
"loss": 0.8957,
"step": 139500
},
{
"epoch": 2.32,
"learning_rate": 4.511046942591303e-06,
"loss": 0.9506,
"step": 140000
},
{
"epoch": 2.33,
"learning_rate": 4.455729253100557e-06,
"loss": 0.9241,
"step": 140500
},
{
"epoch": 2.34,
"learning_rate": 4.400411563609812e-06,
"loss": 0.9283,
"step": 141000
},
{
"epoch": 2.35,
"learning_rate": 4.345093874119066e-06,
"loss": 0.9078,
"step": 141500
},
{
"epoch": 2.36,
"learning_rate": 4.2897761846283206e-06,
"loss": 0.8944,
"step": 142000
},
{
"epoch": 2.36,
"learning_rate": 4.234458495137576e-06,
"loss": 0.9283,
"step": 142500
},
{
"epoch": 2.37,
"learning_rate": 4.17914080564683e-06,
"loss": 0.9263,
"step": 143000
},
{
"epoch": 2.38,
"learning_rate": 4.123823116156085e-06,
"loss": 0.9381,
"step": 143500
},
{
"epoch": 2.39,
"learning_rate": 4.068505426665339e-06,
"loss": 0.9135,
"step": 144000
},
{
"epoch": 2.4,
"learning_rate": 4.0131877371745945e-06,
"loss": 0.8499,
"step": 144500
},
{
"epoch": 2.41,
"learning_rate": 3.957870047683849e-06,
"loss": 0.9364,
"step": 145000
},
{
"epoch": 2.41,
"learning_rate": 3.902552358193104e-06,
"loss": 0.8836,
"step": 145500
},
{
"epoch": 2.42,
"learning_rate": 3.847234668702358e-06,
"loss": 0.8774,
"step": 146000
},
{
"epoch": 2.43,
"learning_rate": 3.791916979211613e-06,
"loss": 0.9224,
"step": 146500
},
{
"epoch": 2.44,
"learning_rate": 3.7365992897208676e-06,
"loss": 0.8753,
"step": 147000
},
{
"epoch": 2.45,
"learning_rate": 3.6812816002301223e-06,
"loss": 0.9015,
"step": 147500
},
{
"epoch": 2.46,
"learning_rate": 3.625963910739376e-06,
"loss": 0.9001,
"step": 148000
},
{
"epoch": 2.46,
"learning_rate": 3.570646221248631e-06,
"loss": 0.9434,
"step": 148500
},
{
"epoch": 2.47,
"learning_rate": 3.5153285317578856e-06,
"loss": 0.9276,
"step": 149000
},
{
"epoch": 2.48,
"learning_rate": 3.4600108422671403e-06,
"loss": 0.9056,
"step": 149500
},
{
"epoch": 2.49,
"learning_rate": 3.404693152776395e-06,
"loss": 0.884,
"step": 150000
},
{
"epoch": 2.5,
"learning_rate": 3.3493754632856497e-06,
"loss": 0.9658,
"step": 150500
},
{
"epoch": 2.51,
"learning_rate": 3.2940577737949044e-06,
"loss": 0.9596,
"step": 151000
},
{
"epoch": 2.51,
"learning_rate": 3.2387400843041587e-06,
"loss": 0.9951,
"step": 151500
},
{
"epoch": 2.52,
"learning_rate": 3.1834223948134134e-06,
"loss": 0.8763,
"step": 152000
},
{
"epoch": 2.53,
"learning_rate": 3.128104705322668e-06,
"loss": 0.8953,
"step": 152500
},
{
"epoch": 2.54,
"learning_rate": 3.0727870158319228e-06,
"loss": 0.9155,
"step": 153000
},
{
"epoch": 2.55,
"learning_rate": 3.0174693263411775e-06,
"loss": 0.89,
"step": 153500
},
{
"epoch": 2.56,
"learning_rate": 2.962151636850432e-06,
"loss": 0.8809,
"step": 154000
},
{
"epoch": 2.56,
"learning_rate": 2.906833947359687e-06,
"loss": 0.8995,
"step": 154500
},
{
"epoch": 2.57,
"learning_rate": 2.8515162578689416e-06,
"loss": 0.9092,
"step": 155000
},
{
"epoch": 2.58,
"learning_rate": 2.7961985683781963e-06,
"loss": 0.9475,
"step": 155500
},
{
"epoch": 2.59,
"learning_rate": 2.740880878887451e-06,
"loss": 0.8896,
"step": 156000
},
{
"epoch": 2.6,
"learning_rate": 2.6855631893967053e-06,
"loss": 0.9145,
"step": 156500
},
{
"epoch": 2.61,
"learning_rate": 2.63024549990596e-06,
"loss": 0.9796,
"step": 157000
},
{
"epoch": 2.61,
"learning_rate": 2.5749278104152147e-06,
"loss": 0.9451,
"step": 157500
},
{
"epoch": 2.62,
"learning_rate": 2.5196101209244694e-06,
"loss": 0.9386,
"step": 158000
},
{
"epoch": 2.63,
"learning_rate": 2.464292431433724e-06,
"loss": 0.9607,
"step": 158500
},
{
"epoch": 2.64,
"learning_rate": 2.408974741942979e-06,
"loss": 0.9343,
"step": 159000
},
{
"epoch": 2.65,
"learning_rate": 2.3536570524522335e-06,
"loss": 0.9211,
"step": 159500
},
{
"epoch": 2.66,
"learning_rate": 2.298339362961488e-06,
"loss": 0.9131,
"step": 160000
},
{
"epoch": 2.66,
"learning_rate": 2.243021673470743e-06,
"loss": 0.9738,
"step": 160500
},
{
"epoch": 2.67,
"learning_rate": 2.187703983979997e-06,
"loss": 0.9394,
"step": 161000
},
{
"epoch": 2.68,
"learning_rate": 2.132386294489252e-06,
"loss": 0.9542,
"step": 161500
},
{
"epoch": 2.69,
"learning_rate": 2.0770686049985066e-06,
"loss": 0.9253,
"step": 162000
},
{
"epoch": 2.7,
"learning_rate": 2.0217509155077613e-06,
"loss": 0.8945,
"step": 162500
},
{
"epoch": 2.71,
"learning_rate": 1.9664332260170156e-06,
"loss": 0.9523,
"step": 163000
},
{
"epoch": 2.71,
"learning_rate": 1.9111155365262703e-06,
"loss": 0.8762,
"step": 163500
},
{
"epoch": 2.72,
"learning_rate": 1.855797847035525e-06,
"loss": 0.8997,
"step": 164000
},
{
"epoch": 2.73,
"learning_rate": 1.8004801575447797e-06,
"loss": 0.9035,
"step": 164500
},
{
"epoch": 2.74,
"learning_rate": 1.7451624680540344e-06,
"loss": 0.9174,
"step": 165000
},
{
"epoch": 2.75,
"learning_rate": 1.689844778563289e-06,
"loss": 0.8572,
"step": 165500
},
{
"epoch": 2.75,
"learning_rate": 1.6345270890725438e-06,
"loss": 0.9433,
"step": 166000
},
{
"epoch": 2.76,
"learning_rate": 1.5792093995817983e-06,
"loss": 0.9124,
"step": 166500
},
{
"epoch": 2.77,
"learning_rate": 1.523891710091053e-06,
"loss": 0.8998,
"step": 167000
},
{
"epoch": 2.78,
"learning_rate": 1.4685740206003077e-06,
"loss": 0.9118,
"step": 167500
},
{
"epoch": 2.79,
"learning_rate": 1.4132563311095624e-06,
"loss": 0.9285,
"step": 168000
},
{
"epoch": 2.8,
"learning_rate": 1.357938641618817e-06,
"loss": 0.929,
"step": 168500
},
{
"epoch": 2.8,
"learning_rate": 1.3026209521280716e-06,
"loss": 0.9209,
"step": 169000
},
{
"epoch": 2.81,
"learning_rate": 1.2473032626373263e-06,
"loss": 0.8879,
"step": 169500
},
{
"epoch": 2.82,
"learning_rate": 1.191985573146581e-06,
"loss": 0.9313,
"step": 170000
},
{
"epoch": 2.83,
"learning_rate": 1.1366678836558355e-06,
"loss": 0.9262,
"step": 170500
},
{
"epoch": 2.84,
"learning_rate": 1.0813501941650902e-06,
"loss": 0.8769,
"step": 171000
},
{
"epoch": 2.85,
"learning_rate": 1.026032504674345e-06,
"loss": 0.926,
"step": 171500
},
{
"epoch": 2.85,
"learning_rate": 9.707148151835994e-07,
"loss": 0.8786,
"step": 172000
},
{
"epoch": 2.86,
"learning_rate": 9.153971256928541e-07,
"loss": 0.8979,
"step": 172500
},
{
"epoch": 2.87,
"learning_rate": 8.600794362021088e-07,
"loss": 0.9133,
"step": 173000
},
{
"epoch": 2.88,
"learning_rate": 8.047617467113634e-07,
"loss": 0.9347,
"step": 173500
},
{
"epoch": 2.89,
"learning_rate": 7.494440572206181e-07,
"loss": 0.9344,
"step": 174000
},
{
"epoch": 2.9,
"learning_rate": 6.941263677298728e-07,
"loss": 0.9326,
"step": 174500
},
{
"epoch": 2.9,
"learning_rate": 6.388086782391274e-07,
"loss": 0.8966,
"step": 175000
},
{
"epoch": 2.91,
"learning_rate": 5.83490988748382e-07,
"loss": 0.9497,
"step": 175500
},
{
"epoch": 2.92,
"learning_rate": 5.281732992576367e-07,
"loss": 0.9086,
"step": 176000
},
{
"epoch": 2.93,
"learning_rate": 4.7285560976689126e-07,
"loss": 0.9177,
"step": 176500
},
{
"epoch": 2.94,
"learning_rate": 4.175379202761459e-07,
"loss": 0.9335,
"step": 177000
},
{
"epoch": 2.95,
"learning_rate": 3.6222023078540056e-07,
"loss": 0.8967,
"step": 177500
},
{
"epoch": 2.95,
"learning_rate": 3.0690254129465527e-07,
"loss": 0.9551,
"step": 178000
},
{
"epoch": 2.96,
"learning_rate": 2.5158485180390987e-07,
"loss": 0.9525,
"step": 178500
},
{
"epoch": 2.97,
"learning_rate": 1.9626716231316452e-07,
"loss": 0.9046,
"step": 179000
},
{
"epoch": 2.98,
"learning_rate": 1.4094947282241917e-07,
"loss": 0.9163,
"step": 179500
},
{
"epoch": 2.99,
"learning_rate": 8.56317833316738e-08,
"loss": 0.935,
"step": 180000
},
{
"epoch": 3.0,
"learning_rate": 3.0314093840928456e-08,
"loss": 0.8872,
"step": 180500
},
{
"epoch": 3.0,
"step": 180774,
"total_flos": 2.9686234485024e+17,
"train_loss": 0.925638329537373,
"train_runtime": 31623.5954,
"train_samples_per_second": 5.716,
"train_steps_per_second": 5.716
}
],
"logging_steps": 500,
"max_steps": 180774,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 2.9686234485024e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}