bodo-roberta-mlm-base / trainer_state.json
Sanjib Narzary
Bodo language model pretrained from scratch, line by line, using the RoBERTa base configuration
a393828
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"global_step": 168252,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 4.9851413356156244e-05,
"loss": 4.6864,
"step": 500
},
{
"epoch": 0.02,
"learning_rate": 4.9702826712312485e-05,
"loss": 4.4314,
"step": 1000
},
{
"epoch": 0.03,
"learning_rate": 4.9554240068468726e-05,
"loss": 4.1833,
"step": 1500
},
{
"epoch": 0.04,
"learning_rate": 4.940565342462497e-05,
"loss": 4.1583,
"step": 2000
},
{
"epoch": 0.04,
"learning_rate": 4.9257066780781215e-05,
"loss": 4.2004,
"step": 2500
},
{
"epoch": 0.05,
"learning_rate": 4.910848013693745e-05,
"loss": 4.2117,
"step": 3000
},
{
"epoch": 0.06,
"learning_rate": 4.89598934930937e-05,
"loss": 4.2172,
"step": 3500
},
{
"epoch": 0.07,
"learning_rate": 4.881130684924994e-05,
"loss": 4.2154,
"step": 4000
},
{
"epoch": 0.08,
"learning_rate": 4.866272020540618e-05,
"loss": 4.1957,
"step": 4500
},
{
"epoch": 0.09,
"learning_rate": 4.851413356156242e-05,
"loss": 4.199,
"step": 5000
},
{
"epoch": 0.1,
"learning_rate": 4.836554691771866e-05,
"loss": 4.2366,
"step": 5500
},
{
"epoch": 0.11,
"learning_rate": 4.82169602738749e-05,
"loss": 4.1593,
"step": 6000
},
{
"epoch": 0.12,
"learning_rate": 4.8068373630031144e-05,
"loss": 4.1763,
"step": 6500
},
{
"epoch": 0.12,
"learning_rate": 4.7919786986187386e-05,
"loss": 4.1563,
"step": 7000
},
{
"epoch": 0.13,
"learning_rate": 4.7771200342343634e-05,
"loss": 4.18,
"step": 7500
},
{
"epoch": 0.14,
"learning_rate": 4.7622613698499875e-05,
"loss": 4.1764,
"step": 8000
},
{
"epoch": 0.15,
"learning_rate": 4.747402705465611e-05,
"loss": 4.1724,
"step": 8500
},
{
"epoch": 0.16,
"learning_rate": 4.732544041081236e-05,
"loss": 4.1814,
"step": 9000
},
{
"epoch": 0.17,
"learning_rate": 4.71768537669686e-05,
"loss": 4.1784,
"step": 9500
},
{
"epoch": 0.18,
"learning_rate": 4.702826712312484e-05,
"loss": 4.19,
"step": 10000
},
{
"epoch": 0.19,
"learning_rate": 4.687968047928108e-05,
"loss": 4.165,
"step": 10500
},
{
"epoch": 0.2,
"learning_rate": 4.673109383543732e-05,
"loss": 4.1738,
"step": 11000
},
{
"epoch": 0.21,
"learning_rate": 4.658250719159356e-05,
"loss": 4.1795,
"step": 11500
},
{
"epoch": 0.21,
"learning_rate": 4.6433920547749804e-05,
"loss": 4.1975,
"step": 12000
},
{
"epoch": 0.22,
"learning_rate": 4.6285333903906045e-05,
"loss": 4.1582,
"step": 12500
},
{
"epoch": 0.23,
"learning_rate": 4.613674726006229e-05,
"loss": 4.1763,
"step": 13000
},
{
"epoch": 0.24,
"learning_rate": 4.5988160616218534e-05,
"loss": 4.1703,
"step": 13500
},
{
"epoch": 0.25,
"learning_rate": 4.583957397237477e-05,
"loss": 4.1839,
"step": 14000
},
{
"epoch": 0.26,
"learning_rate": 4.569098732853102e-05,
"loss": 4.2002,
"step": 14500
},
{
"epoch": 0.27,
"learning_rate": 4.554240068468726e-05,
"loss": 4.1428,
"step": 15000
},
{
"epoch": 0.28,
"learning_rate": 4.53938140408435e-05,
"loss": 4.187,
"step": 15500
},
{
"epoch": 0.29,
"learning_rate": 4.524522739699974e-05,
"loss": 4.1869,
"step": 16000
},
{
"epoch": 0.29,
"learning_rate": 4.509664075315598e-05,
"loss": 4.1829,
"step": 16500
},
{
"epoch": 0.3,
"learning_rate": 4.494805410931223e-05,
"loss": 4.1572,
"step": 17000
},
{
"epoch": 0.31,
"learning_rate": 4.4799467465468464e-05,
"loss": 4.1367,
"step": 17500
},
{
"epoch": 0.32,
"learning_rate": 4.4650880821624705e-05,
"loss": 4.1692,
"step": 18000
},
{
"epoch": 0.33,
"learning_rate": 4.450229417778095e-05,
"loss": 4.182,
"step": 18500
},
{
"epoch": 0.34,
"learning_rate": 4.435370753393719e-05,
"loss": 4.2536,
"step": 19000
},
{
"epoch": 0.35,
"learning_rate": 4.4205120890093435e-05,
"loss": 4.2086,
"step": 19500
},
{
"epoch": 0.36,
"learning_rate": 4.4056534246249676e-05,
"loss": 4.1256,
"step": 20000
},
{
"epoch": 0.37,
"learning_rate": 4.390794760240592e-05,
"loss": 4.1798,
"step": 20500
},
{
"epoch": 0.37,
"learning_rate": 4.375936095856216e-05,
"loss": 4.1822,
"step": 21000
},
{
"epoch": 0.38,
"learning_rate": 4.36107743147184e-05,
"loss": 4.1925,
"step": 21500
},
{
"epoch": 0.39,
"learning_rate": 4.346218767087464e-05,
"loss": 4.1702,
"step": 22000
},
{
"epoch": 0.4,
"learning_rate": 4.331360102703089e-05,
"loss": 4.1983,
"step": 22500
},
{
"epoch": 0.41,
"learning_rate": 4.3165014383187123e-05,
"loss": 4.2146,
"step": 23000
},
{
"epoch": 0.42,
"learning_rate": 4.3016427739343365e-05,
"loss": 4.2053,
"step": 23500
},
{
"epoch": 0.43,
"learning_rate": 4.286784109549961e-05,
"loss": 4.2043,
"step": 24000
},
{
"epoch": 0.44,
"learning_rate": 4.271925445165585e-05,
"loss": 4.1945,
"step": 24500
},
{
"epoch": 0.45,
"learning_rate": 4.2570667807812095e-05,
"loss": 4.1583,
"step": 25000
},
{
"epoch": 0.45,
"learning_rate": 4.2422081163968336e-05,
"loss": 4.2209,
"step": 25500
},
{
"epoch": 0.46,
"learning_rate": 4.227349452012458e-05,
"loss": 4.1892,
"step": 26000
},
{
"epoch": 0.47,
"learning_rate": 4.212490787628082e-05,
"loss": 4.1542,
"step": 26500
},
{
"epoch": 0.48,
"learning_rate": 4.197632123243706e-05,
"loss": 4.1594,
"step": 27000
},
{
"epoch": 0.49,
"learning_rate": 4.18277345885933e-05,
"loss": 4.1926,
"step": 27500
},
{
"epoch": 0.5,
"learning_rate": 4.167914794474955e-05,
"loss": 4.1596,
"step": 28000
},
{
"epoch": 0.51,
"learning_rate": 4.153056130090578e-05,
"loss": 4.1525,
"step": 28500
},
{
"epoch": 0.52,
"learning_rate": 4.138197465706203e-05,
"loss": 4.1282,
"step": 29000
},
{
"epoch": 0.53,
"learning_rate": 4.123338801321827e-05,
"loss": 4.1569,
"step": 29500
},
{
"epoch": 0.53,
"learning_rate": 4.1084801369374507e-05,
"loss": 4.1544,
"step": 30000
},
{
"epoch": 0.54,
"learning_rate": 4.0936214725530755e-05,
"loss": 4.1984,
"step": 30500
},
{
"epoch": 0.55,
"learning_rate": 4.0787628081686996e-05,
"loss": 4.1907,
"step": 31000
},
{
"epoch": 0.56,
"learning_rate": 4.063904143784324e-05,
"loss": 4.2402,
"step": 31500
},
{
"epoch": 0.57,
"learning_rate": 4.049045479399948e-05,
"loss": 4.1914,
"step": 32000
},
{
"epoch": 0.58,
"learning_rate": 4.034186815015572e-05,
"loss": 4.1622,
"step": 32500
},
{
"epoch": 0.59,
"learning_rate": 4.019328150631197e-05,
"loss": 4.1444,
"step": 33000
},
{
"epoch": 0.6,
"learning_rate": 4.00446948624682e-05,
"loss": 4.1581,
"step": 33500
},
{
"epoch": 0.61,
"learning_rate": 3.989610821862444e-05,
"loss": 4.2014,
"step": 34000
},
{
"epoch": 0.62,
"learning_rate": 3.974752157478069e-05,
"loss": 4.1797,
"step": 34500
},
{
"epoch": 0.62,
"learning_rate": 3.959893493093693e-05,
"loss": 4.1669,
"step": 35000
},
{
"epoch": 0.63,
"learning_rate": 3.9450348287093166e-05,
"loss": 4.1538,
"step": 35500
},
{
"epoch": 0.64,
"learning_rate": 3.9301761643249414e-05,
"loss": 4.1359,
"step": 36000
},
{
"epoch": 0.65,
"learning_rate": 3.9153174999405655e-05,
"loss": 4.1551,
"step": 36500
},
{
"epoch": 0.66,
"learning_rate": 3.9004588355561897e-05,
"loss": 4.2037,
"step": 37000
},
{
"epoch": 0.67,
"learning_rate": 3.885600171171814e-05,
"loss": 4.1428,
"step": 37500
},
{
"epoch": 0.68,
"learning_rate": 3.870741506787438e-05,
"loss": 4.1649,
"step": 38000
},
{
"epoch": 0.69,
"learning_rate": 3.855882842403063e-05,
"loss": 4.1858,
"step": 38500
},
{
"epoch": 0.7,
"learning_rate": 3.841024178018686e-05,
"loss": 4.1586,
"step": 39000
},
{
"epoch": 0.7,
"learning_rate": 3.82616551363431e-05,
"loss": 4.1849,
"step": 39500
},
{
"epoch": 0.71,
"learning_rate": 3.811306849249935e-05,
"loss": 4.1875,
"step": 40000
},
{
"epoch": 0.72,
"learning_rate": 3.796448184865559e-05,
"loss": 4.1367,
"step": 40500
},
{
"epoch": 0.73,
"learning_rate": 3.781589520481183e-05,
"loss": 4.1681,
"step": 41000
},
{
"epoch": 0.74,
"learning_rate": 3.7667308560968074e-05,
"loss": 4.1701,
"step": 41500
},
{
"epoch": 0.75,
"learning_rate": 3.7518721917124315e-05,
"loss": 4.1509,
"step": 42000
},
{
"epoch": 0.76,
"learning_rate": 3.737013527328056e-05,
"loss": 4.2155,
"step": 42500
},
{
"epoch": 0.77,
"learning_rate": 3.72215486294368e-05,
"loss": 4.2056,
"step": 43000
},
{
"epoch": 0.78,
"learning_rate": 3.707296198559304e-05,
"loss": 4.1598,
"step": 43500
},
{
"epoch": 0.78,
"learning_rate": 3.6924375341749286e-05,
"loss": 4.1701,
"step": 44000
},
{
"epoch": 0.79,
"learning_rate": 3.677578869790552e-05,
"loss": 4.1735,
"step": 44500
},
{
"epoch": 0.8,
"learning_rate": 3.662720205406177e-05,
"loss": 4.1457,
"step": 45000
},
{
"epoch": 0.81,
"learning_rate": 3.647861541021801e-05,
"loss": 4.1469,
"step": 45500
},
{
"epoch": 0.82,
"learning_rate": 3.633002876637425e-05,
"loss": 4.1014,
"step": 46000
},
{
"epoch": 0.83,
"learning_rate": 3.618144212253049e-05,
"loss": 4.1308,
"step": 46500
},
{
"epoch": 0.84,
"learning_rate": 3.6032855478686734e-05,
"loss": 4.1282,
"step": 47000
},
{
"epoch": 0.85,
"learning_rate": 3.5884268834842975e-05,
"loss": 4.1384,
"step": 47500
},
{
"epoch": 0.86,
"learning_rate": 3.5735682190999216e-05,
"loss": 4.1709,
"step": 48000
},
{
"epoch": 0.86,
"learning_rate": 3.558709554715546e-05,
"loss": 4.1861,
"step": 48500
},
{
"epoch": 0.87,
"learning_rate": 3.54385089033117e-05,
"loss": 4.1939,
"step": 49000
},
{
"epoch": 0.88,
"learning_rate": 3.5289922259467946e-05,
"loss": 4.186,
"step": 49500
},
{
"epoch": 0.89,
"learning_rate": 3.514133561562418e-05,
"loss": 4.1661,
"step": 50000
},
{
"epoch": 0.9,
"learning_rate": 3.499274897178043e-05,
"loss": 4.1291,
"step": 50500
},
{
"epoch": 0.91,
"learning_rate": 3.484416232793667e-05,
"loss": 4.1377,
"step": 51000
},
{
"epoch": 0.92,
"learning_rate": 3.469557568409291e-05,
"loss": 4.2083,
"step": 51500
},
{
"epoch": 0.93,
"learning_rate": 3.454698904024915e-05,
"loss": 4.1944,
"step": 52000
},
{
"epoch": 0.94,
"learning_rate": 3.439840239640539e-05,
"loss": 4.1597,
"step": 52500
},
{
"epoch": 0.95,
"learning_rate": 3.4249815752561634e-05,
"loss": 4.1432,
"step": 53000
},
{
"epoch": 0.95,
"learning_rate": 3.4101229108717876e-05,
"loss": 4.1659,
"step": 53500
},
{
"epoch": 0.96,
"learning_rate": 3.395264246487412e-05,
"loss": 4.1568,
"step": 54000
},
{
"epoch": 0.97,
"learning_rate": 3.3804055821030365e-05,
"loss": 4.1673,
"step": 54500
},
{
"epoch": 0.98,
"learning_rate": 3.3655469177186606e-05,
"loss": 4.1522,
"step": 55000
},
{
"epoch": 0.99,
"learning_rate": 3.350688253334284e-05,
"loss": 4.1354,
"step": 55500
},
{
"epoch": 1.0,
"learning_rate": 3.335829588949909e-05,
"loss": 4.172,
"step": 56000
},
{
"epoch": 1.01,
"learning_rate": 3.320970924565533e-05,
"loss": 4.1534,
"step": 56500
},
{
"epoch": 1.02,
"learning_rate": 3.306112260181157e-05,
"loss": 4.1092,
"step": 57000
},
{
"epoch": 1.03,
"learning_rate": 3.291253595796781e-05,
"loss": 4.1695,
"step": 57500
},
{
"epoch": 1.03,
"learning_rate": 3.276394931412405e-05,
"loss": 4.1512,
"step": 58000
},
{
"epoch": 1.04,
"learning_rate": 3.2615362670280294e-05,
"loss": 4.1747,
"step": 58500
},
{
"epoch": 1.05,
"learning_rate": 3.2466776026436535e-05,
"loss": 4.1872,
"step": 59000
},
{
"epoch": 1.06,
"learning_rate": 3.2318189382592776e-05,
"loss": 4.1634,
"step": 59500
},
{
"epoch": 1.07,
"learning_rate": 3.2169602738749024e-05,
"loss": 4.1033,
"step": 60000
},
{
"epoch": 1.08,
"learning_rate": 3.2021016094905265e-05,
"loss": 4.1247,
"step": 60500
},
{
"epoch": 1.09,
"learning_rate": 3.18724294510615e-05,
"loss": 4.1823,
"step": 61000
},
{
"epoch": 1.1,
"learning_rate": 3.172384280721775e-05,
"loss": 4.1718,
"step": 61500
},
{
"epoch": 1.11,
"learning_rate": 3.157525616337399e-05,
"loss": 4.1982,
"step": 62000
},
{
"epoch": 1.11,
"learning_rate": 3.142666951953023e-05,
"loss": 4.1956,
"step": 62500
},
{
"epoch": 1.12,
"learning_rate": 3.127808287568647e-05,
"loss": 4.162,
"step": 63000
},
{
"epoch": 1.13,
"learning_rate": 3.112949623184271e-05,
"loss": 4.1438,
"step": 63500
},
{
"epoch": 1.14,
"learning_rate": 3.098090958799896e-05,
"loss": 4.2248,
"step": 64000
},
{
"epoch": 1.15,
"learning_rate": 3.0832322944155195e-05,
"loss": 4.1655,
"step": 64500
},
{
"epoch": 1.16,
"learning_rate": 3.0683736300311436e-05,
"loss": 4.152,
"step": 65000
},
{
"epoch": 1.17,
"learning_rate": 3.0535149656467684e-05,
"loss": 4.1784,
"step": 65500
},
{
"epoch": 1.18,
"learning_rate": 3.0386563012623925e-05,
"loss": 4.1958,
"step": 66000
},
{
"epoch": 1.19,
"learning_rate": 3.0237976368780163e-05,
"loss": 4.156,
"step": 66500
},
{
"epoch": 1.19,
"learning_rate": 3.0089389724936407e-05,
"loss": 4.178,
"step": 67000
},
{
"epoch": 1.2,
"learning_rate": 2.994080308109265e-05,
"loss": 4.1562,
"step": 67500
},
{
"epoch": 1.21,
"learning_rate": 2.9792216437248886e-05,
"loss": 4.1535,
"step": 68000
},
{
"epoch": 1.22,
"learning_rate": 2.964362979340513e-05,
"loss": 4.1224,
"step": 68500
},
{
"epoch": 1.23,
"learning_rate": 2.9495043149561376e-05,
"loss": 4.1547,
"step": 69000
},
{
"epoch": 1.24,
"learning_rate": 2.9346456505717617e-05,
"loss": 4.1378,
"step": 69500
},
{
"epoch": 1.25,
"learning_rate": 2.9197869861873855e-05,
"loss": 4.1573,
"step": 70000
},
{
"epoch": 1.26,
"learning_rate": 2.90492832180301e-05,
"loss": 4.1575,
"step": 70500
},
{
"epoch": 1.27,
"learning_rate": 2.8900696574186344e-05,
"loss": 4.1726,
"step": 71000
},
{
"epoch": 1.27,
"learning_rate": 2.875210993034258e-05,
"loss": 4.1971,
"step": 71500
},
{
"epoch": 1.28,
"learning_rate": 2.8603523286498823e-05,
"loss": 4.1289,
"step": 72000
},
{
"epoch": 1.29,
"learning_rate": 2.8454936642655067e-05,
"loss": 4.1571,
"step": 72500
},
{
"epoch": 1.3,
"learning_rate": 2.8306349998811312e-05,
"loss": 4.124,
"step": 73000
},
{
"epoch": 1.31,
"learning_rate": 2.815776335496755e-05,
"loss": 4.1769,
"step": 73500
},
{
"epoch": 1.32,
"learning_rate": 2.800917671112379e-05,
"loss": 4.1581,
"step": 74000
},
{
"epoch": 1.33,
"learning_rate": 2.7860590067280035e-05,
"loss": 4.1828,
"step": 74500
},
{
"epoch": 1.34,
"learning_rate": 2.7712003423436276e-05,
"loss": 4.1448,
"step": 75000
},
{
"epoch": 1.35,
"learning_rate": 2.7563416779592514e-05,
"loss": 4.2025,
"step": 75500
},
{
"epoch": 1.36,
"learning_rate": 2.741483013574876e-05,
"loss": 4.1474,
"step": 76000
},
{
"epoch": 1.36,
"learning_rate": 2.7266243491905003e-05,
"loss": 4.2199,
"step": 76500
},
{
"epoch": 1.37,
"learning_rate": 2.711765684806124e-05,
"loss": 4.1517,
"step": 77000
},
{
"epoch": 1.38,
"learning_rate": 2.6969070204217482e-05,
"loss": 4.1469,
"step": 77500
},
{
"epoch": 1.39,
"learning_rate": 2.6820483560373727e-05,
"loss": 4.1104,
"step": 78000
},
{
"epoch": 1.4,
"learning_rate": 2.667189691652997e-05,
"loss": 4.1482,
"step": 78500
},
{
"epoch": 1.41,
"learning_rate": 2.652331027268621e-05,
"loss": 4.1674,
"step": 79000
},
{
"epoch": 1.42,
"learning_rate": 2.637472362884245e-05,
"loss": 4.1601,
"step": 79500
},
{
"epoch": 1.43,
"learning_rate": 2.6226136984998695e-05,
"loss": 4.2056,
"step": 80000
},
{
"epoch": 1.44,
"learning_rate": 2.607755034115494e-05,
"loss": 4.1223,
"step": 80500
},
{
"epoch": 1.44,
"learning_rate": 2.5928963697311177e-05,
"loss": 4.1843,
"step": 81000
},
{
"epoch": 1.45,
"learning_rate": 2.578037705346742e-05,
"loss": 4.1564,
"step": 81500
},
{
"epoch": 1.46,
"learning_rate": 2.5631790409623663e-05,
"loss": 4.1476,
"step": 82000
},
{
"epoch": 1.47,
"learning_rate": 2.54832037657799e-05,
"loss": 4.139,
"step": 82500
},
{
"epoch": 1.48,
"learning_rate": 2.5334617121936145e-05,
"loss": 4.1541,
"step": 83000
},
{
"epoch": 1.49,
"learning_rate": 2.5186030478092386e-05,
"loss": 4.1593,
"step": 83500
},
{
"epoch": 1.5,
"learning_rate": 2.503744383424863e-05,
"loss": 4.1922,
"step": 84000
},
{
"epoch": 1.51,
"learning_rate": 2.4888857190404872e-05,
"loss": 4.1539,
"step": 84500
},
{
"epoch": 1.52,
"learning_rate": 2.4740270546561113e-05,
"loss": 4.1665,
"step": 85000
},
{
"epoch": 1.52,
"learning_rate": 2.459168390271735e-05,
"loss": 4.1476,
"step": 85500
},
{
"epoch": 1.53,
"learning_rate": 2.4443097258873596e-05,
"loss": 4.1933,
"step": 86000
},
{
"epoch": 1.54,
"learning_rate": 2.4294510615029837e-05,
"loss": 4.2239,
"step": 86500
},
{
"epoch": 1.55,
"learning_rate": 2.4145923971186078e-05,
"loss": 4.1581,
"step": 87000
},
{
"epoch": 1.56,
"learning_rate": 2.399733732734232e-05,
"loss": 4.217,
"step": 87500
},
{
"epoch": 1.57,
"learning_rate": 2.3848750683498564e-05,
"loss": 4.1504,
"step": 88000
},
{
"epoch": 1.58,
"learning_rate": 2.3700164039654805e-05,
"loss": 4.1499,
"step": 88500
},
{
"epoch": 1.59,
"learning_rate": 2.3551577395811046e-05,
"loss": 4.1879,
"step": 89000
},
{
"epoch": 1.6,
"learning_rate": 2.3402990751967287e-05,
"loss": 4.1592,
"step": 89500
},
{
"epoch": 1.6,
"learning_rate": 2.3254404108123532e-05,
"loss": 4.1356,
"step": 90000
},
{
"epoch": 1.61,
"learning_rate": 2.3105817464279773e-05,
"loss": 4.1924,
"step": 90500
},
{
"epoch": 1.62,
"learning_rate": 2.2957230820436014e-05,
"loss": 4.1122,
"step": 91000
},
{
"epoch": 1.63,
"learning_rate": 2.2808644176592255e-05,
"loss": 4.1681,
"step": 91500
},
{
"epoch": 1.64,
"learning_rate": 2.2660057532748497e-05,
"loss": 4.1771,
"step": 92000
},
{
"epoch": 1.65,
"learning_rate": 2.251147088890474e-05,
"loss": 4.1596,
"step": 92500
},
{
"epoch": 1.66,
"learning_rate": 2.236288424506098e-05,
"loss": 4.1591,
"step": 93000
},
{
"epoch": 1.67,
"learning_rate": 2.2214297601217223e-05,
"loss": 4.1259,
"step": 93500
},
{
"epoch": 1.68,
"learning_rate": 2.2065710957373465e-05,
"loss": 4.1673,
"step": 94000
},
{
"epoch": 1.68,
"learning_rate": 2.191712431352971e-05,
"loss": 4.1816,
"step": 94500
},
{
"epoch": 1.69,
"learning_rate": 2.1768537669685947e-05,
"loss": 4.1738,
"step": 95000
},
{
"epoch": 1.7,
"learning_rate": 2.1619951025842188e-05,
"loss": 4.1417,
"step": 95500
},
{
"epoch": 1.71,
"learning_rate": 2.1471364381998433e-05,
"loss": 4.141,
"step": 96000
},
{
"epoch": 1.72,
"learning_rate": 2.1322777738154674e-05,
"loss": 4.1754,
"step": 96500
},
{
"epoch": 1.73,
"learning_rate": 2.1174191094310915e-05,
"loss": 4.1311,
"step": 97000
},
{
"epoch": 1.74,
"learning_rate": 2.1025604450467156e-05,
"loss": 4.2245,
"step": 97500
},
{
"epoch": 1.75,
"learning_rate": 2.08770178066234e-05,
"loss": 4.2167,
"step": 98000
},
{
"epoch": 1.76,
"learning_rate": 2.0728431162779642e-05,
"loss": 4.1694,
"step": 98500
},
{
"epoch": 1.77,
"learning_rate": 2.0579844518935883e-05,
"loss": 4.1671,
"step": 99000
},
{
"epoch": 1.77,
"learning_rate": 2.0431257875092124e-05,
"loss": 4.1839,
"step": 99500
},
{
"epoch": 1.78,
"learning_rate": 2.0282671231248365e-05,
"loss": 4.1632,
"step": 100000
},
{
"epoch": 1.79,
"learning_rate": 2.013408458740461e-05,
"loss": 4.2308,
"step": 100500
},
{
"epoch": 1.8,
"learning_rate": 1.9985497943560848e-05,
"loss": 4.1291,
"step": 101000
},
{
"epoch": 1.81,
"learning_rate": 1.9836911299717092e-05,
"loss": 4.1498,
"step": 101500
},
{
"epoch": 1.82,
"learning_rate": 1.9688324655873334e-05,
"loss": 4.1277,
"step": 102000
},
{
"epoch": 1.83,
"learning_rate": 1.9539738012029578e-05,
"loss": 4.1425,
"step": 102500
},
{
"epoch": 1.84,
"learning_rate": 1.9391151368185816e-05,
"loss": 4.1518,
"step": 103000
},
{
"epoch": 1.85,
"learning_rate": 1.924256472434206e-05,
"loss": 4.1907,
"step": 103500
},
{
"epoch": 1.85,
"learning_rate": 1.90939780804983e-05,
"loss": 4.1234,
"step": 104000
},
{
"epoch": 1.86,
"learning_rate": 1.8945391436654543e-05,
"loss": 4.1702,
"step": 104500
},
{
"epoch": 1.87,
"learning_rate": 1.8796804792810784e-05,
"loss": 4.1586,
"step": 105000
},
{
"epoch": 1.88,
"learning_rate": 1.8648218148967025e-05,
"loss": 4.1564,
"step": 105500
},
{
"epoch": 1.89,
"learning_rate": 1.849963150512327e-05,
"loss": 4.1591,
"step": 106000
},
{
"epoch": 1.9,
"learning_rate": 1.835104486127951e-05,
"loss": 4.1811,
"step": 106500
},
{
"epoch": 1.91,
"learning_rate": 1.8202458217435752e-05,
"loss": 4.1795,
"step": 107000
},
{
"epoch": 1.92,
"learning_rate": 1.8053871573591993e-05,
"loss": 4.1605,
"step": 107500
},
{
"epoch": 1.93,
"learning_rate": 1.7905284929748238e-05,
"loss": 4.1761,
"step": 108000
},
{
"epoch": 1.93,
"learning_rate": 1.775669828590448e-05,
"loss": 4.186,
"step": 108500
},
{
"epoch": 1.94,
"learning_rate": 1.760811164206072e-05,
"loss": 4.2381,
"step": 109000
},
{
"epoch": 1.95,
"learning_rate": 1.745952499821696e-05,
"loss": 4.1839,
"step": 109500
},
{
"epoch": 1.96,
"learning_rate": 1.7310938354373202e-05,
"loss": 4.2121,
"step": 110000
},
{
"epoch": 1.97,
"learning_rate": 1.7162351710529444e-05,
"loss": 4.1832,
"step": 110500
},
{
"epoch": 1.98,
"learning_rate": 1.7013765066685685e-05,
"loss": 4.1467,
"step": 111000
},
{
"epoch": 1.99,
"learning_rate": 1.686517842284193e-05,
"loss": 4.1932,
"step": 111500
},
{
"epoch": 2.0,
"learning_rate": 1.671659177899817e-05,
"loss": 4.1277,
"step": 112000
},
{
"epoch": 2.01,
"learning_rate": 1.656800513515441e-05,
"loss": 4.0758,
"step": 112500
},
{
"epoch": 2.01,
"learning_rate": 1.6419418491310653e-05,
"loss": 4.1676,
"step": 113000
},
{
"epoch": 2.02,
"learning_rate": 1.6270831847466897e-05,
"loss": 4.2034,
"step": 113500
},
{
"epoch": 2.03,
"learning_rate": 1.612224520362314e-05,
"loss": 4.1162,
"step": 114000
},
{
"epoch": 2.04,
"learning_rate": 1.597365855977938e-05,
"loss": 4.1963,
"step": 114500
},
{
"epoch": 2.05,
"learning_rate": 1.582507191593562e-05,
"loss": 4.1657,
"step": 115000
},
{
"epoch": 2.06,
"learning_rate": 1.5676485272091862e-05,
"loss": 4.1668,
"step": 115500
},
{
"epoch": 2.07,
"learning_rate": 1.5527898628248107e-05,
"loss": 4.153,
"step": 116000
},
{
"epoch": 2.08,
"learning_rate": 1.5379311984404344e-05,
"loss": 4.1883,
"step": 116500
},
{
"epoch": 2.09,
"learning_rate": 1.5230725340560589e-05,
"loss": 4.1552,
"step": 117000
},
{
"epoch": 2.1,
"learning_rate": 1.508213869671683e-05,
"loss": 4.1509,
"step": 117500
},
{
"epoch": 2.1,
"learning_rate": 1.4933552052873073e-05,
"loss": 4.1463,
"step": 118000
},
{
"epoch": 2.11,
"learning_rate": 1.4784965409029314e-05,
"loss": 4.1101,
"step": 118500
},
{
"epoch": 2.12,
"learning_rate": 1.4636378765185554e-05,
"loss": 4.085,
"step": 119000
},
{
"epoch": 2.13,
"learning_rate": 1.4487792121341798e-05,
"loss": 4.1117,
"step": 119500
},
{
"epoch": 2.14,
"learning_rate": 1.4339205477498038e-05,
"loss": 4.1806,
"step": 120000
},
{
"epoch": 2.15,
"learning_rate": 1.4190618833654282e-05,
"loss": 4.1498,
"step": 120500
},
{
"epoch": 2.16,
"learning_rate": 1.4042032189810522e-05,
"loss": 4.1366,
"step": 121000
},
{
"epoch": 2.17,
"learning_rate": 1.3893445545966766e-05,
"loss": 4.1916,
"step": 121500
},
{
"epoch": 2.18,
"learning_rate": 1.3744858902123006e-05,
"loss": 4.1232,
"step": 122000
},
{
"epoch": 2.18,
"learning_rate": 1.359627225827925e-05,
"loss": 4.1785,
"step": 122500
},
{
"epoch": 2.19,
"learning_rate": 1.344768561443549e-05,
"loss": 4.1444,
"step": 123000
},
{
"epoch": 2.2,
"learning_rate": 1.3299098970591731e-05,
"loss": 4.1848,
"step": 123500
},
{
"epoch": 2.21,
"learning_rate": 1.3150512326747974e-05,
"loss": 4.1975,
"step": 124000
},
{
"epoch": 2.22,
"learning_rate": 1.3001925682904215e-05,
"loss": 4.1828,
"step": 124500
},
{
"epoch": 2.23,
"learning_rate": 1.2853339039060458e-05,
"loss": 4.1857,
"step": 125000
},
{
"epoch": 2.24,
"learning_rate": 1.2704752395216699e-05,
"loss": 4.1146,
"step": 125500
},
{
"epoch": 2.25,
"learning_rate": 1.2556165751372942e-05,
"loss": 4.1367,
"step": 126000
},
{
"epoch": 2.26,
"learning_rate": 1.2407579107529183e-05,
"loss": 4.1608,
"step": 126500
},
{
"epoch": 2.26,
"learning_rate": 1.2258992463685424e-05,
"loss": 4.1655,
"step": 127000
},
{
"epoch": 2.27,
"learning_rate": 1.2110405819841667e-05,
"loss": 4.158,
"step": 127500
},
{
"epoch": 2.28,
"learning_rate": 1.1961819175997908e-05,
"loss": 4.1302,
"step": 128000
},
{
"epoch": 2.29,
"learning_rate": 1.1813232532154151e-05,
"loss": 4.1538,
"step": 128500
},
{
"epoch": 2.3,
"learning_rate": 1.1664645888310392e-05,
"loss": 4.1838,
"step": 129000
},
{
"epoch": 2.31,
"learning_rate": 1.1516059244466634e-05,
"loss": 4.178,
"step": 129500
},
{
"epoch": 2.32,
"learning_rate": 1.1367472600622876e-05,
"loss": 4.1766,
"step": 130000
},
{
"epoch": 2.33,
"learning_rate": 1.1218885956779118e-05,
"loss": 4.1533,
"step": 130500
},
{
"epoch": 2.34,
"learning_rate": 1.1070299312935359e-05,
"loss": 4.1715,
"step": 131000
},
{
"epoch": 2.34,
"learning_rate": 1.0921712669091602e-05,
"loss": 4.1212,
"step": 131500
},
{
"epoch": 2.35,
"learning_rate": 1.0773126025247843e-05,
"loss": 4.1308,
"step": 132000
},
{
"epoch": 2.36,
"learning_rate": 1.0624539381404084e-05,
"loss": 4.1403,
"step": 132500
},
{
"epoch": 2.37,
"learning_rate": 1.0475952737560327e-05,
"loss": 4.1551,
"step": 133000
},
{
"epoch": 2.38,
"learning_rate": 1.0327366093716568e-05,
"loss": 4.1915,
"step": 133500
},
{
"epoch": 2.39,
"learning_rate": 1.0178779449872811e-05,
"loss": 4.1381,
"step": 134000
},
{
"epoch": 2.4,
"learning_rate": 1.0030192806029052e-05,
"loss": 4.1527,
"step": 134500
},
{
"epoch": 2.41,
"learning_rate": 9.881606162185295e-06,
"loss": 4.1663,
"step": 135000
},
{
"epoch": 2.42,
"learning_rate": 9.733019518341534e-06,
"loss": 4.2214,
"step": 135500
},
{
"epoch": 2.42,
"learning_rate": 9.584432874497777e-06,
"loss": 4.1587,
"step": 136000
},
{
"epoch": 2.43,
"learning_rate": 9.435846230654018e-06,
"loss": 4.1345,
"step": 136500
},
{
"epoch": 2.44,
"learning_rate": 9.287259586810261e-06,
"loss": 4.189,
"step": 137000
},
{
"epoch": 2.45,
"learning_rate": 9.138672942966502e-06,
"loss": 4.1978,
"step": 137500
},
{
"epoch": 2.46,
"learning_rate": 8.990086299122745e-06,
"loss": 4.1403,
"step": 138000
},
{
"epoch": 2.47,
"learning_rate": 8.841499655278986e-06,
"loss": 4.1012,
"step": 138500
},
{
"epoch": 2.48,
"learning_rate": 8.69291301143523e-06,
"loss": 4.1605,
"step": 139000
},
{
"epoch": 2.49,
"learning_rate": 8.54432636759147e-06,
"loss": 4.1597,
"step": 139500
},
{
"epoch": 2.5,
"learning_rate": 8.395739723747713e-06,
"loss": 4.1998,
"step": 140000
},
{
"epoch": 2.51,
"learning_rate": 8.247153079903953e-06,
"loss": 4.1804,
"step": 140500
},
{
"epoch": 2.51,
"learning_rate": 8.098566436060196e-06,
"loss": 4.1377,
"step": 141000
},
{
"epoch": 2.52,
"learning_rate": 7.949979792216437e-06,
"loss": 4.1452,
"step": 141500
},
{
"epoch": 2.53,
"learning_rate": 7.80139314837268e-06,
"loss": 4.1304,
"step": 142000
},
{
"epoch": 2.54,
"learning_rate": 7.652806504528921e-06,
"loss": 4.15,
"step": 142500
},
{
"epoch": 2.55,
"learning_rate": 7.504219860685163e-06,
"loss": 4.1462,
"step": 143000
},
{
"epoch": 2.56,
"learning_rate": 7.355633216841405e-06,
"loss": 4.1741,
"step": 143500
},
{
"epoch": 2.57,
"learning_rate": 7.207046572997647e-06,
"loss": 4.187,
"step": 144000
},
{
"epoch": 2.58,
"learning_rate": 7.058459929153889e-06,
"loss": 4.183,
"step": 144500
},
{
"epoch": 2.59,
"learning_rate": 6.909873285310129e-06,
"loss": 4.0993,
"step": 145000
},
{
"epoch": 2.59,
"learning_rate": 6.761286641466371e-06,
"loss": 4.2255,
"step": 145500
},
{
"epoch": 2.6,
"learning_rate": 6.612699997622613e-06,
"loss": 4.1642,
"step": 146000
},
{
"epoch": 2.61,
"learning_rate": 6.464113353778855e-06,
"loss": 4.1559,
"step": 146500
},
{
"epoch": 2.62,
"learning_rate": 6.315526709935097e-06,
"loss": 4.1859,
"step": 147000
},
{
"epoch": 2.63,
"learning_rate": 6.1669400660913394e-06,
"loss": 4.1573,
"step": 147500
},
{
"epoch": 2.64,
"learning_rate": 6.0183534222475815e-06,
"loss": 4.1746,
"step": 148000
},
{
"epoch": 2.65,
"learning_rate": 5.869766778403823e-06,
"loss": 4.2273,
"step": 148500
},
{
"epoch": 2.66,
"learning_rate": 5.721180134560065e-06,
"loss": 4.1844,
"step": 149000
},
{
"epoch": 2.67,
"learning_rate": 5.572593490716307e-06,
"loss": 4.1636,
"step": 149500
},
{
"epoch": 2.67,
"learning_rate": 5.424006846872549e-06,
"loss": 4.1785,
"step": 150000
},
{
"epoch": 2.68,
"learning_rate": 5.275420203028791e-06,
"loss": 4.2064,
"step": 150500
},
{
"epoch": 2.69,
"learning_rate": 5.126833559185032e-06,
"loss": 4.1103,
"step": 151000
},
{
"epoch": 2.7,
"learning_rate": 4.978246915341274e-06,
"loss": 4.1814,
"step": 151500
},
{
"epoch": 2.71,
"learning_rate": 4.829660271497516e-06,
"loss": 4.1903,
"step": 152000
},
{
"epoch": 2.72,
"learning_rate": 4.681073627653758e-06,
"loss": 4.1319,
"step": 152500
},
{
"epoch": 2.73,
"learning_rate": 4.53248698381e-06,
"loss": 4.127,
"step": 153000
},
{
"epoch": 2.74,
"learning_rate": 4.383900339966241e-06,
"loss": 4.1278,
"step": 153500
},
{
"epoch": 2.75,
"learning_rate": 4.235313696122483e-06,
"loss": 4.1209,
"step": 154000
},
{
"epoch": 2.75,
"learning_rate": 4.086727052278725e-06,
"loss": 4.145,
"step": 154500
},
{
"epoch": 2.76,
"learning_rate": 3.938140408434967e-06,
"loss": 4.1624,
"step": 155000
},
{
"epoch": 2.77,
"learning_rate": 3.7895537645912088e-06,
"loss": 4.1436,
"step": 155500
},
{
"epoch": 2.78,
"learning_rate": 3.6409671207474504e-06,
"loss": 4.1485,
"step": 156000
},
{
"epoch": 2.79,
"learning_rate": 3.4923804769036924e-06,
"loss": 4.1695,
"step": 156500
},
{
"epoch": 2.8,
"learning_rate": 3.343793833059934e-06,
"loss": 4.0884,
"step": 157000
},
{
"epoch": 2.81,
"learning_rate": 3.195207189216176e-06,
"loss": 4.1719,
"step": 157500
},
{
"epoch": 2.82,
"learning_rate": 3.0466205453724176e-06,
"loss": 4.1232,
"step": 158000
},
{
"epoch": 2.83,
"learning_rate": 2.898033901528659e-06,
"loss": 4.1327,
"step": 158500
},
{
"epoch": 2.84,
"learning_rate": 2.749447257684901e-06,
"loss": 4.1568,
"step": 159000
},
{
"epoch": 2.84,
"learning_rate": 2.6008606138411432e-06,
"loss": 4.0888,
"step": 159500
},
{
"epoch": 2.85,
"learning_rate": 2.452273969997385e-06,
"loss": 4.1711,
"step": 160000
},
{
"epoch": 2.86,
"learning_rate": 2.303687326153627e-06,
"loss": 4.2205,
"step": 160500
},
{
"epoch": 2.87,
"learning_rate": 2.1551006823098684e-06,
"loss": 4.1221,
"step": 161000
},
{
"epoch": 2.88,
"learning_rate": 2.0065140384661104e-06,
"loss": 4.1887,
"step": 161500
},
{
"epoch": 2.89,
"learning_rate": 1.8579273946223525e-06,
"loss": 4.138,
"step": 162000
},
{
"epoch": 2.9,
"learning_rate": 1.709340750778594e-06,
"loss": 4.166,
"step": 162500
},
{
"epoch": 2.91,
"learning_rate": 1.5607541069348359e-06,
"loss": 4.1599,
"step": 163000
},
{
"epoch": 2.92,
"learning_rate": 1.4121674630910779e-06,
"loss": 4.1308,
"step": 163500
},
{
"epoch": 2.92,
"learning_rate": 1.2635808192473197e-06,
"loss": 4.2161,
"step": 164000
},
{
"epoch": 2.93,
"learning_rate": 1.1149941754035615e-06,
"loss": 4.1883,
"step": 164500
},
{
"epoch": 2.94,
"learning_rate": 9.66407531559803e-07,
"loss": 4.17,
"step": 165000
},
{
"epoch": 2.95,
"learning_rate": 8.17820887716045e-07,
"loss": 4.1741,
"step": 165500
},
{
"epoch": 2.96,
"learning_rate": 6.692342438722869e-07,
"loss": 4.1797,
"step": 166000
},
{
"epoch": 2.97,
"learning_rate": 5.206476000285286e-07,
"loss": 4.1768,
"step": 166500
},
{
"epoch": 2.98,
"learning_rate": 3.720609561847705e-07,
"loss": 4.1909,
"step": 167000
},
{
"epoch": 2.99,
"learning_rate": 2.234743123410123e-07,
"loss": 4.1689,
"step": 167500
},
{
"epoch": 3.0,
"learning_rate": 7.488766849725412e-08,
"loss": 4.1115,
"step": 168000
},
{
"epoch": 3.0,
"step": 168252,
"total_flos": 2.098762564848768e+16,
"train_loss": 4.168116396368988,
"train_runtime": 19036.7524,
"train_samples_per_second": 17.677,
"train_steps_per_second": 8.838
}
],
"max_steps": 168252,
"num_train_epochs": 3,
"total_flos": 2.098762564848768e+16,
"trial_name": null,
"trial_params": null
}