{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 3402,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008818342151675485,
"grad_norm": 1.4955648753589619,
"learning_rate": 3.883495145631068e-06,
"loss": 0.4452,
"step": 10
},
{
"epoch": 0.01763668430335097,
"grad_norm": 0.8166020313583234,
"learning_rate": 7.766990291262136e-06,
"loss": 0.3563,
"step": 20
},
{
"epoch": 0.026455026455026454,
"grad_norm": 1.0139812433784159,
"learning_rate": 1.1650485436893204e-05,
"loss": 0.2954,
"step": 30
},
{
"epoch": 0.03527336860670194,
"grad_norm": 1.121332128031883,
"learning_rate": 1.5533980582524273e-05,
"loss": 0.264,
"step": 40
},
{
"epoch": 0.04409171075837742,
"grad_norm": 1.5248485572756025,
"learning_rate": 1.9417475728155343e-05,
"loss": 0.2515,
"step": 50
},
{
"epoch": 0.05291005291005291,
"grad_norm": 0.7086102688104752,
"learning_rate": 2.330097087378641e-05,
"loss": 0.2384,
"step": 60
},
{
"epoch": 0.06172839506172839,
"grad_norm": 0.6620513303312378,
"learning_rate": 2.718446601941748e-05,
"loss": 0.2371,
"step": 70
},
{
"epoch": 0.07054673721340388,
"grad_norm": 1.1439083265825398,
"learning_rate": 3.1067961165048545e-05,
"loss": 0.2316,
"step": 80
},
{
"epoch": 0.07936507936507936,
"grad_norm": 0.8671080906528094,
"learning_rate": 3.4951456310679615e-05,
"loss": 0.2243,
"step": 90
},
{
"epoch": 0.08818342151675485,
"grad_norm": 0.8521737444390105,
"learning_rate": 3.8834951456310685e-05,
"loss": 0.2216,
"step": 100
},
{
"epoch": 0.09700176366843033,
"grad_norm": 0.820039351853962,
"learning_rate": 3.9999555645496425e-05,
"loss": 0.2181,
"step": 110
},
{
"epoch": 0.10582010582010581,
"grad_norm": 0.4284687555753216,
"learning_rate": 3.999737926280763e-05,
"loss": 0.2123,
"step": 120
},
{
"epoch": 0.1146384479717813,
"grad_norm": 1.0310561704789347,
"learning_rate": 3.999338943291723e-05,
"loss": 0.2072,
"step": 130
},
{
"epoch": 0.12345679012345678,
"grad_norm": 0.48329839911504663,
"learning_rate": 3.998758651763991e-05,
"loss": 0.2046,
"step": 140
},
{
"epoch": 0.13227513227513227,
"grad_norm": 0.43509409688235956,
"learning_rate": 3.9979971043208656e-05,
"loss": 0.2017,
"step": 150
},
{
"epoch": 0.14109347442680775,
"grad_norm": 0.3421062202275864,
"learning_rate": 3.997054370022701e-05,
"loss": 0.2026,
"step": 160
},
{
"epoch": 0.14991181657848324,
"grad_norm": 0.41797655307441983,
"learning_rate": 3.995930534360642e-05,
"loss": 0.1972,
"step": 170
},
{
"epoch": 0.15873015873015872,
"grad_norm": 0.40272951427585846,
"learning_rate": 3.994625699248876e-05,
"loss": 0.1947,
"step": 180
},
{
"epoch": 0.1675485008818342,
"grad_norm": 0.35340459426606163,
"learning_rate": 3.993139983015388e-05,
"loss": 0.1971,
"step": 190
},
{
"epoch": 0.1763668430335097,
"grad_norm": 0.7994951274374914,
"learning_rate": 3.9914735203912274e-05,
"loss": 0.1925,
"step": 200
},
{
"epoch": 0.18518518518518517,
"grad_norm": 0.40411453618039694,
"learning_rate": 3.989626462498299e-05,
"loss": 0.192,
"step": 210
},
{
"epoch": 0.19400352733686066,
"grad_norm": 0.31941758385605157,
"learning_rate": 3.9875989768356474e-05,
"loss": 0.1868,
"step": 220
},
{
"epoch": 0.20282186948853614,
"grad_norm": 0.3142908369196689,
"learning_rate": 3.9853912472642766e-05,
"loss": 0.1893,
"step": 230
},
{
"epoch": 0.21164021164021163,
"grad_norm": 0.3241679786689458,
"learning_rate": 3.9830034739904716e-05,
"loss": 0.1863,
"step": 240
},
{
"epoch": 0.2204585537918871,
"grad_norm": 0.31178995966174394,
"learning_rate": 3.980435873547643e-05,
"loss": 0.1862,
"step": 250
},
{
"epoch": 0.2292768959435626,
"grad_norm": 0.47323032502490103,
"learning_rate": 3.977688678776695e-05,
"loss": 0.1851,
"step": 260
},
{
"epoch": 0.23809523809523808,
"grad_norm": 0.3265364244937022,
"learning_rate": 3.974762138804906e-05,
"loss": 0.1865,
"step": 270
},
{
"epoch": 0.24691358024691357,
"grad_norm": 0.34222373145685614,
"learning_rate": 3.971656519023337e-05,
"loss": 0.1817,
"step": 280
},
{
"epoch": 0.25573192239858905,
"grad_norm": 0.3739773461101385,
"learning_rate": 3.9683721010627666e-05,
"loss": 0.1818,
"step": 290
},
{
"epoch": 0.26455026455026454,
"grad_norm": 0.30430093336446573,
"learning_rate": 3.964909182768153e-05,
"loss": 0.18,
"step": 300
},
{
"epoch": 0.27336860670194,
"grad_norm": 0.34663011155426887,
"learning_rate": 3.9612680781716203e-05,
"loss": 0.1803,
"step": 310
},
{
"epoch": 0.2821869488536155,
"grad_norm": 0.298542445104257,
"learning_rate": 3.9574491174639824e-05,
"loss": 0.1789,
"step": 320
},
{
"epoch": 0.291005291005291,
"grad_norm": 0.2685161967289745,
"learning_rate": 3.9534526469648e-05,
"loss": 0.178,
"step": 330
},
{
"epoch": 0.2998236331569665,
"grad_norm": 0.39999602620736396,
"learning_rate": 3.949279029090976e-05,
"loss": 0.179,
"step": 340
},
{
"epoch": 0.30864197530864196,
"grad_norm": 0.36622856228147127,
"learning_rate": 3.944928642323887e-05,
"loss": 0.1786,
"step": 350
},
{
"epoch": 0.31746031746031744,
"grad_norm": 0.4663556460380618,
"learning_rate": 3.940401881175065e-05,
"loss": 0.18,
"step": 360
},
{
"epoch": 0.3262786596119929,
"grad_norm": 0.3720323085775275,
"learning_rate": 3.9356991561504177e-05,
"loss": 0.1776,
"step": 370
},
{
"epoch": 0.3350970017636684,
"grad_norm": 0.302010044803705,
"learning_rate": 3.9308208937130046e-05,
"loss": 0.1763,
"step": 380
},
{
"epoch": 0.3439153439153439,
"grad_norm": 0.31665097292956457,
"learning_rate": 3.925767536244362e-05,
"loss": 0.1751,
"step": 390
},
{
"epoch": 0.3527336860670194,
"grad_norm": 0.3228908226384742,
"learning_rate": 3.920539542004387e-05,
"loss": 0.1745,
"step": 400
},
{
"epoch": 0.36155202821869487,
"grad_norm": 0.2799415607873476,
"learning_rate": 3.915137385089781e-05,
"loss": 0.1749,
"step": 410
},
{
"epoch": 0.37037037037037035,
"grad_norm": 0.24387451434550744,
"learning_rate": 3.909561555391053e-05,
"loss": 0.174,
"step": 420
},
{
"epoch": 0.37918871252204583,
"grad_norm": 0.2726005390731001,
"learning_rate": 3.903812558548099e-05,
"loss": 0.1734,
"step": 430
},
{
"epoch": 0.3880070546737213,
"grad_norm": 0.3028874282302824,
"learning_rate": 3.8978909159043465e-05,
"loss": 0.174,
"step": 440
},
{
"epoch": 0.3968253968253968,
"grad_norm": 0.2512628489766056,
"learning_rate": 3.891797164459477e-05,
"loss": 0.1715,
"step": 450
},
{
"epoch": 0.4056437389770723,
"grad_norm": 0.23809767948700475,
"learning_rate": 3.885531856820726e-05,
"loss": 0.1707,
"step": 460
},
{
"epoch": 0.4144620811287478,
"grad_norm": 0.27178090104851893,
"learning_rate": 3.8790955611527754e-05,
"loss": 0.1702,
"step": 470
},
{
"epoch": 0.42328042328042326,
"grad_norm": 0.3849347266422906,
"learning_rate": 3.872488861126226e-05,
"loss": 0.1688,
"step": 480
},
{
"epoch": 0.43209876543209874,
"grad_norm": 0.3597097968078764,
"learning_rate": 3.865712355864668e-05,
"loss": 0.169,
"step": 490
},
{
"epoch": 0.4409171075837742,
"grad_norm": 0.26535671712243575,
"learning_rate": 3.8587666598903526e-05,
"loss": 0.1672,
"step": 500
},
{
"epoch": 0.4497354497354497,
"grad_norm": 0.27483599851665746,
"learning_rate": 3.851652403068461e-05,
"loss": 0.1726,
"step": 510
},
{
"epoch": 0.4585537918871252,
"grad_norm": 0.27278517138668995,
"learning_rate": 3.8443702305499886e-05,
"loss": 0.1704,
"step": 520
},
{
"epoch": 0.4673721340388007,
"grad_norm": 0.24988566985125385,
"learning_rate": 3.836920802713238e-05,
"loss": 0.1671,
"step": 530
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.34671624980077004,
"learning_rate": 3.829304795103933e-05,
"loss": 0.1672,
"step": 540
},
{
"epoch": 0.48500881834215165,
"grad_norm": 0.309822758612831,
"learning_rate": 3.82152289837396e-05,
"loss": 0.1666,
"step": 550
},
{
"epoch": 0.49382716049382713,
"grad_norm": 0.2735497475576908,
"learning_rate": 3.813575818218732e-05,
"loss": 0.1686,
"step": 560
},
{
"epoch": 0.5026455026455027,
"grad_norm": 0.29690091794159296,
"learning_rate": 3.8054642753132e-05,
"loss": 0.1669,
"step": 570
},
{
"epoch": 0.5114638447971781,
"grad_norm": 0.25476479109627226,
"learning_rate": 3.797189005246489e-05,
"loss": 0.1622,
"step": 580
},
{
"epoch": 0.5202821869488536,
"grad_norm": 0.21847320529770614,
"learning_rate": 3.788750758455204e-05,
"loss": 0.1629,
"step": 590
},
{
"epoch": 0.5291005291005291,
"grad_norm": 0.22282921245577492,
"learning_rate": 3.7801503001553664e-05,
"loss": 0.1682,
"step": 600
},
{
"epoch": 0.5379188712522046,
"grad_norm": 0.2964689468325172,
"learning_rate": 3.771388410273027e-05,
"loss": 0.1672,
"step": 610
},
{
"epoch": 0.54673721340388,
"grad_norm": 0.2828883507477133,
"learning_rate": 3.762465883373539e-05,
"loss": 0.1618,
"step": 620
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.2856702713626834,
"learning_rate": 3.753383528589499e-05,
"loss": 0.1639,
"step": 630
},
{
"epoch": 0.564373897707231,
"grad_norm": 0.25603423467383013,
"learning_rate": 3.744142169547376e-05,
"loss": 0.1603,
"step": 640
},
{
"epoch": 0.5731922398589065,
"grad_norm": 0.24322397045686334,
"learning_rate": 3.734742644292821e-05,
"loss": 0.1627,
"step": 650
},
{
"epoch": 0.582010582010582,
"grad_norm": 0.21120093154802644,
"learning_rate": 3.725185805214665e-05,
"loss": 0.1639,
"step": 660
},
{
"epoch": 0.5908289241622575,
"grad_norm": 0.2455862358583795,
"learning_rate": 3.7154725189676264e-05,
"loss": 0.1619,
"step": 670
},
{
"epoch": 0.599647266313933,
"grad_norm": 0.2665765386539727,
"learning_rate": 3.7056036663937164e-05,
"loss": 0.165,
"step": 680
},
{
"epoch": 0.6084656084656085,
"grad_norm": 0.23935749033120898,
"learning_rate": 3.695580142442361e-05,
"loss": 0.1614,
"step": 690
},
{
"epoch": 0.6172839506172839,
"grad_norm": 0.23327066612729097,
"learning_rate": 3.685402856089242e-05,
"loss": 0.1618,
"step": 700
},
{
"epoch": 0.6261022927689595,
"grad_norm": 0.2295032987661733,
"learning_rate": 3.675072730253868e-05,
"loss": 0.1655,
"step": 710
},
{
"epoch": 0.6349206349206349,
"grad_norm": 0.2597641502201965,
"learning_rate": 3.6645907017158814e-05,
"loss": 0.1608,
"step": 720
},
{
"epoch": 0.6437389770723104,
"grad_norm": 0.24169971877971796,
"learning_rate": 3.653957721030107e-05,
"loss": 0.1606,
"step": 730
},
{
"epoch": 0.6525573192239859,
"grad_norm": 0.23805929987756733,
"learning_rate": 3.64317475244035e-05,
"loss": 0.1624,
"step": 740
},
{
"epoch": 0.6613756613756614,
"grad_norm": 0.21337182553159947,
"learning_rate": 3.632242773791952e-05,
"loss": 0.1605,
"step": 750
},
{
"epoch": 0.6701940035273368,
"grad_norm": 0.2086929717616856,
"learning_rate": 3.621162776443125e-05,
"loss": 0.1597,
"step": 760
},
{
"epoch": 0.6790123456790124,
"grad_norm": 0.20642071715098376,
"learning_rate": 3.60993576517504e-05,
"loss": 0.1609,
"step": 770
},
{
"epoch": 0.6878306878306878,
"grad_norm": 0.19167244052151497,
"learning_rate": 3.598562758100716e-05,
"loss": 0.1607,
"step": 780
},
{
"epoch": 0.6966490299823633,
"grad_norm": 0.2285439297223176,
"learning_rate": 3.5870447865726913e-05,
"loss": 0.1606,
"step": 790
},
{
"epoch": 0.7054673721340388,
"grad_norm": 0.2173697770388281,
"learning_rate": 3.575382895089495e-05,
"loss": 0.161,
"step": 800
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.2831703519646522,
"learning_rate": 3.563578141200929e-05,
"loss": 0.1592,
"step": 810
},
{
"epoch": 0.7231040564373897,
"grad_norm": 0.23525021695459578,
"learning_rate": 3.551631595412164e-05,
"loss": 0.1596,
"step": 820
},
{
"epoch": 0.7319223985890653,
"grad_norm": 0.254325105892367,
"learning_rate": 3.539544341086663e-05,
"loss": 0.1565,
"step": 830
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.20727493642114397,
"learning_rate": 3.5273174743479336e-05,
"loss": 0.1581,
"step": 840
},
{
"epoch": 0.7495590828924162,
"grad_norm": 0.25745565286011807,
"learning_rate": 3.514952103980132e-05,
"loss": 0.1598,
"step": 850
},
{
"epoch": 0.7583774250440917,
"grad_norm": 0.19482500821770854,
"learning_rate": 3.50244935132751e-05,
"loss": 0.1586,
"step": 860
},
{
"epoch": 0.7671957671957672,
"grad_norm": 0.20509823998997453,
"learning_rate": 3.489810350192729e-05,
"loss": 0.1559,
"step": 870
},
{
"epoch": 0.7760141093474426,
"grad_norm": 0.20895652988918806,
"learning_rate": 3.477036246734038e-05,
"loss": 0.1563,
"step": 880
},
{
"epoch": 0.7848324514991182,
"grad_norm": 0.27987607099152345,
"learning_rate": 3.4641281993613406e-05,
"loss": 0.1596,
"step": 890
},
{
"epoch": 0.7936507936507936,
"grad_norm": 0.19730219851608477,
"learning_rate": 3.451087378631143e-05,
"loss": 0.1579,
"step": 900
},
{
"epoch": 0.8024691358024691,
"grad_norm": 0.20906456760570116,
"learning_rate": 3.437914967140401e-05,
"loss": 0.1571,
"step": 910
},
{
"epoch": 0.8112874779541446,
"grad_norm": 0.22886723809980594,
"learning_rate": 3.4246121594192814e-05,
"loss": 0.157,
"step": 920
},
{
"epoch": 0.8201058201058201,
"grad_norm": 0.19417100843050594,
"learning_rate": 3.411180161822831e-05,
"loss": 0.1554,
"step": 930
},
{
"epoch": 0.8289241622574955,
"grad_norm": 0.26409165069315016,
"learning_rate": 3.3976201924215854e-05,
"loss": 0.1557,
"step": 940
},
{
"epoch": 0.8377425044091711,
"grad_norm": 0.25269001142437164,
"learning_rate": 3.3839334808911057e-05,
"loss": 0.155,
"step": 950
},
{
"epoch": 0.8465608465608465,
"grad_norm": 0.28787596764707246,
"learning_rate": 3.370121268400467e-05,
"loss": 0.1554,
"step": 960
},
{
"epoch": 0.855379188712522,
"grad_norm": 0.20073900051671914,
"learning_rate": 3.356184807499705e-05,
"loss": 0.1543,
"step": 970
},
{
"epoch": 0.8641975308641975,
"grad_norm": 0.19523250730479108,
"learning_rate": 3.342125362006226e-05,
"loss": 0.1552,
"step": 980
},
{
"epoch": 0.873015873015873,
"grad_norm": 0.2332670363257663,
"learning_rate": 3.3279442068902024e-05,
"loss": 0.155,
"step": 990
},
{
"epoch": 0.8818342151675485,
"grad_norm": 0.21314128725157566,
"learning_rate": 3.3136426281589525e-05,
"loss": 0.1559,
"step": 1000
},
{
"epoch": 0.890652557319224,
"grad_norm": 0.2073616768080302,
"learning_rate": 3.299221922740318e-05,
"loss": 0.1555,
"step": 1010
},
{
"epoch": 0.8994708994708994,
"grad_norm": 0.20612629267891894,
"learning_rate": 3.284683398365053e-05,
"loss": 0.1544,
"step": 1020
},
{
"epoch": 0.908289241622575,
"grad_norm": 0.192587617407336,
"learning_rate": 3.270028373448237e-05,
"loss": 0.156,
"step": 1030
},
{
"epoch": 0.9171075837742504,
"grad_norm": 0.20076697321476172,
"learning_rate": 3.255258176969711e-05,
"loss": 0.1525,
"step": 1040
},
{
"epoch": 0.9259259259259259,
"grad_norm": 0.20063292329444252,
"learning_rate": 3.2403741483535624e-05,
"loss": 0.1579,
"step": 1050
},
{
"epoch": 0.9347442680776014,
"grad_norm": 0.2188372918686337,
"learning_rate": 3.22537763734666e-05,
"loss": 0.1545,
"step": 1060
},
{
"epoch": 0.9435626102292769,
"grad_norm": 0.2348876410690357,
"learning_rate": 3.210270003896254e-05,
"loss": 0.1523,
"step": 1070
},
{
"epoch": 0.9523809523809523,
"grad_norm": 0.21610514112165158,
"learning_rate": 3.195052618026646e-05,
"loss": 0.1498,
"step": 1080
},
{
"epoch": 0.9611992945326279,
"grad_norm": 0.17805564179617644,
"learning_rate": 3.1797268597149575e-05,
"loss": 0.1533,
"step": 1090
},
{
"epoch": 0.9700176366843033,
"grad_norm": 0.19468985685608828,
"learning_rate": 3.1642941187659784e-05,
"loss": 0.1505,
"step": 1100
},
{
"epoch": 0.9788359788359788,
"grad_norm": 0.17922597209513355,
"learning_rate": 3.1487557946861413e-05,
"loss": 0.154,
"step": 1110
},
{
"epoch": 0.9876543209876543,
"grad_norm": 0.20699394719540626,
"learning_rate": 3.133113296556603e-05,
"loss": 0.1495,
"step": 1120
},
{
"epoch": 0.9964726631393298,
"grad_norm": 0.18289020763400496,
"learning_rate": 3.117368042905466e-05,
"loss": 0.1517,
"step": 1130
},
{
"epoch": 1.0052910052910053,
"grad_norm": 0.2145241959606213,
"learning_rate": 3.101521461579139e-05,
"loss": 0.1409,
"step": 1140
},
{
"epoch": 1.0141093474426808,
"grad_norm": 0.20230933218493735,
"learning_rate": 3.085574989612856e-05,
"loss": 0.1321,
"step": 1150
},
{
"epoch": 1.0229276895943562,
"grad_norm": 0.18244350688332062,
"learning_rate": 3.069530073100356e-05,
"loss": 0.1313,
"step": 1160
},
{
"epoch": 1.0317460317460316,
"grad_norm": 0.19783195547267102,
"learning_rate": 3.053388167062747e-05,
"loss": 0.1321,
"step": 1170
},
{
"epoch": 1.0405643738977073,
"grad_norm": 0.2047114005081859,
"learning_rate": 3.037150735316561e-05,
"loss": 0.1332,
"step": 1180
},
{
"epoch": 1.0493827160493827,
"grad_norm": 0.24595017471664576,
"learning_rate": 3.0208192503410046e-05,
"loss": 0.131,
"step": 1190
},
{
"epoch": 1.0582010582010581,
"grad_norm": 0.23144086447587278,
"learning_rate": 3.0043951931444302e-05,
"loss": 0.1327,
"step": 1200
},
{
"epoch": 1.0670194003527338,
"grad_norm": 0.16985629437769836,
"learning_rate": 2.9878800531300335e-05,
"loss": 0.1337,
"step": 1210
},
{
"epoch": 1.0758377425044092,
"grad_norm": 0.19256405636053653,
"learning_rate": 2.9712753279607852e-05,
"loss": 0.1364,
"step": 1220
},
{
"epoch": 1.0846560846560847,
"grad_norm": 0.18555434282413438,
"learning_rate": 2.9545825234236187e-05,
"loss": 0.1306,
"step": 1230
},
{
"epoch": 1.09347442680776,
"grad_norm": 0.1847579476124522,
"learning_rate": 2.937803153292878e-05,
"loss": 0.1342,
"step": 1240
},
{
"epoch": 1.1022927689594355,
"grad_norm": 0.1834216574450437,
"learning_rate": 2.9209387391930425e-05,
"loss": 0.1341,
"step": 1250
},
{
"epoch": 1.1111111111111112,
"grad_norm": 0.18665818389653582,
"learning_rate": 2.9039908104607395e-05,
"loss": 0.1333,
"step": 1260
},
{
"epoch": 1.1199294532627866,
"grad_norm": 0.22873933014699052,
"learning_rate": 2.8869609040060572e-05,
"loss": 0.1336,
"step": 1270
},
{
"epoch": 1.128747795414462,
"grad_norm": 0.2183113906263531,
"learning_rate": 2.869850564173172e-05,
"loss": 0.1323,
"step": 1280
},
{
"epoch": 1.1375661375661377,
"grad_norm": 0.19099060399929857,
"learning_rate": 2.8526613426002998e-05,
"loss": 0.1319,
"step": 1290
},
{
"epoch": 1.146384479717813,
"grad_norm": 0.23917372520488436,
"learning_rate": 2.835394798078988e-05,
"loss": 0.1321,
"step": 1300
},
{
"epoch": 1.1552028218694885,
"grad_norm": 0.18407428002576853,
"learning_rate": 2.8180524964127573e-05,
"loss": 0.1317,
"step": 1310
},
{
"epoch": 1.164021164021164,
"grad_norm": 0.18430803378192598,
"learning_rate": 2.8006360102751064e-05,
"loss": 0.1322,
"step": 1320
},
{
"epoch": 1.1728395061728394,
"grad_norm": 0.1865135026451199,
"learning_rate": 2.7831469190668974e-05,
"loss": 0.133,
"step": 1330
},
{
"epoch": 1.181657848324515,
"grad_norm": 0.1884073237139698,
"learning_rate": 2.7655868087731295e-05,
"loss": 0.1317,
"step": 1340
},
{
"epoch": 1.1904761904761905,
"grad_norm": 0.20367415474239536,
"learning_rate": 2.7479572718191108e-05,
"loss": 0.1333,
"step": 1350
},
{
"epoch": 1.199294532627866,
"grad_norm": 0.17284748829644184,
"learning_rate": 2.7302599069260572e-05,
"loss": 0.1309,
"step": 1360
},
{
"epoch": 1.2081128747795415,
"grad_norm": 0.19491278141231555,
"learning_rate": 2.712496318966108e-05,
"loss": 0.1314,
"step": 1370
},
{
"epoch": 1.216931216931217,
"grad_norm": 0.18728688430541293,
"learning_rate": 2.69466811881679e-05,
"loss": 0.1334,
"step": 1380
},
{
"epoch": 1.2257495590828924,
"grad_norm": 0.17142414955172516,
"learning_rate": 2.67677692321494e-05,
"loss": 0.1331,
"step": 1390
},
{
"epoch": 1.2345679012345678,
"grad_norm": 0.20066666678092135,
"learning_rate": 2.6588243546100882e-05,
"loss": 0.1313,
"step": 1400
},
{
"epoch": 1.2433862433862433,
"grad_norm": 0.17159972944285481,
"learning_rate": 2.6408120410173296e-05,
"loss": 0.1287,
"step": 1410
},
{
"epoch": 1.252204585537919,
"grad_norm": 0.19860628522583873,
"learning_rate": 2.6227416158696884e-05,
"loss": 0.1297,
"step": 1420
},
{
"epoch": 1.2610229276895943,
"grad_norm": 0.18008904396878098,
"learning_rate": 2.60461471786999e-05,
"loss": 0.1309,
"step": 1430
},
{
"epoch": 1.2698412698412698,
"grad_norm": 0.19386888274171976,
"learning_rate": 2.5864329908422597e-05,
"loss": 0.1292,
"step": 1440
},
{
"epoch": 1.2786596119929454,
"grad_norm": 0.17303097118338565,
"learning_rate": 2.5681980835826503e-05,
"loss": 0.1295,
"step": 1450
},
{
"epoch": 1.2874779541446209,
"grad_norm": 0.2348921814277289,
"learning_rate": 2.5499116497099224e-05,
"loss": 0.1314,
"step": 1460
},
{
"epoch": 1.2962962962962963,
"grad_norm": 0.1778768891302646,
"learning_rate": 2.531575347515492e-05,
"loss": 0.1318,
"step": 1470
},
{
"epoch": 1.3051146384479717,
"grad_norm": 0.17862605604327308,
"learning_rate": 2.5131908398130426e-05,
"loss": 0.1291,
"step": 1480
},
{
"epoch": 1.3139329805996471,
"grad_norm": 0.1840381159551672,
"learning_rate": 2.49475979378774e-05,
"loss": 0.1299,
"step": 1490
},
{
"epoch": 1.3227513227513228,
"grad_norm": 0.20235876379158219,
"learning_rate": 2.476283880845042e-05,
"loss": 0.1333,
"step": 1500
},
{
"epoch": 1.3315696649029982,
"grad_norm": 0.17181294480397433,
"learning_rate": 2.4577647764591276e-05,
"loss": 0.1311,
"step": 1510
},
{
"epoch": 1.3403880070546736,
"grad_norm": 0.17352177231303226,
"learning_rate": 2.4392041600209608e-05,
"loss": 0.1298,
"step": 1520
},
{
"epoch": 1.3492063492063493,
"grad_norm": 0.16851418392357184,
"learning_rate": 2.4206037146859944e-05,
"loss": 0.1266,
"step": 1530
},
{
"epoch": 1.3580246913580247,
"grad_norm": 0.18412108629041574,
"learning_rate": 2.4019651272215338e-05,
"loss": 0.131,
"step": 1540
},
{
"epoch": 1.3668430335097002,
"grad_norm": 0.1643456743151178,
"learning_rate": 2.383290087853775e-05,
"loss": 0.1327,
"step": 1550
},
{
"epoch": 1.3756613756613756,
"grad_norm": 0.16300381228966188,
"learning_rate": 2.364580290114527e-05,
"loss": 0.1328,
"step": 1560
},
{
"epoch": 1.384479717813051,
"grad_norm": 0.19794647549566596,
"learning_rate": 2.3458374306876355e-05,
"loss": 0.1337,
"step": 1570
},
{
"epoch": 1.3932980599647267,
"grad_norm": 0.17467360938800394,
"learning_rate": 2.327063209255121e-05,
"loss": 0.128,
"step": 1580
},
{
"epoch": 1.402116402116402,
"grad_norm": 0.16534045366329517,
"learning_rate": 2.308259328343042e-05,
"loss": 0.1294,
"step": 1590
},
{
"epoch": 1.4109347442680775,
"grad_norm": 0.17164884564245564,
"learning_rate": 2.289427493167107e-05,
"loss": 0.1291,
"step": 1600
},
{
"epoch": 1.4197530864197532,
"grad_norm": 0.17685950000349324,
"learning_rate": 2.270569411478033e-05,
"loss": 0.1274,
"step": 1610
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.1831151884550037,
"learning_rate": 2.251686793406684e-05,
"loss": 0.1297,
"step": 1620
},
{
"epoch": 1.437389770723104,
"grad_norm": 0.17974969366439886,
"learning_rate": 2.2327813513089875e-05,
"loss": 0.1302,
"step": 1630
},
{
"epoch": 1.4462081128747795,
"grad_norm": 0.16155993308368063,
"learning_rate": 2.213854799610649e-05,
"loss": 0.1285,
"step": 1640
},
{
"epoch": 1.455026455026455,
"grad_norm": 0.1654535308338956,
"learning_rate": 2.194908854651683e-05,
"loss": 0.1289,
"step": 1650
},
{
"epoch": 1.4638447971781305,
"grad_norm": 0.16507648912276562,
"learning_rate": 2.1759452345307675e-05,
"loss": 0.1275,
"step": 1660
},
{
"epoch": 1.472663139329806,
"grad_norm": 0.1676790489693593,
"learning_rate": 2.1569656589494386e-05,
"loss": 0.1266,
"step": 1670
},
{
"epoch": 1.4814814814814814,
"grad_norm": 0.1644355202426517,
"learning_rate": 2.137971849056142e-05,
"loss": 0.1271,
"step": 1680
},
{
"epoch": 1.490299823633157,
"grad_norm": 0.16767396631965428,
"learning_rate": 2.1189655272901498e-05,
"loss": 0.1288,
"step": 1690
},
{
"epoch": 1.4991181657848325,
"grad_norm": 0.16721154360000728,
"learning_rate": 2.0999484172253654e-05,
"loss": 0.1274,
"step": 1700
},
{
"epoch": 1.507936507936508,
"grad_norm": 0.17218284188058708,
"learning_rate": 2.08092224341402e-05,
"loss": 0.1268,
"step": 1710
},
{
"epoch": 1.5167548500881836,
"grad_norm": 0.16455812323793684,
"learning_rate": 2.0618887312302838e-05,
"loss": 0.1269,
"step": 1720
},
{
"epoch": 1.5255731922398588,
"grad_norm": 0.18830223142596714,
"learning_rate": 2.042849606713802e-05,
"loss": 0.1327,
"step": 1730
},
{
"epoch": 1.5343915343915344,
"grad_norm": 0.16569892684919732,
"learning_rate": 2.02380659641317e-05,
"loss": 0.1289,
"step": 1740
},
{
"epoch": 1.5432098765432098,
"grad_norm": 0.16686242636674062,
"learning_rate": 2.004761427229363e-05,
"loss": 0.1241,
"step": 1750
},
{
"epoch": 1.5520282186948853,
"grad_norm": 0.17007064306268857,
"learning_rate": 1.985715826259131e-05,
"loss": 0.1283,
"step": 1760
},
{
"epoch": 1.560846560846561,
"grad_norm": 0.17887859439191145,
"learning_rate": 1.966671520638383e-05,
"loss": 0.1268,
"step": 1770
},
{
"epoch": 1.5696649029982364,
"grad_norm": 0.17041240102360516,
"learning_rate": 1.947630237385558e-05,
"loss": 0.1281,
"step": 1780
},
{
"epoch": 1.5784832451499118,
"grad_norm": 0.16320063163341206,
"learning_rate": 1.9285937032450146e-05,
"loss": 0.128,
"step": 1790
},
{
"epoch": 1.5873015873015874,
"grad_norm": 0.17846835386270518,
"learning_rate": 1.909563644530443e-05,
"loss": 0.1264,
"step": 1800
},
{
"epoch": 1.5961199294532626,
"grad_norm": 0.1506424501330394,
"learning_rate": 1.8905417869683113e-05,
"loss": 0.1268,
"step": 1810
},
{
"epoch": 1.6049382716049383,
"grad_norm": 0.15586172611133045,
"learning_rate": 1.871529855541375e-05,
"loss": 0.1249,
"step": 1820
},
{
"epoch": 1.6137566137566137,
"grad_norm": 0.16774679274129406,
"learning_rate": 1.8525295743322453e-05,
"loss": 0.1269,
"step": 1830
},
{
"epoch": 1.6225749559082892,
"grad_norm": 0.1727635954009701,
"learning_rate": 1.8335426663670397e-05,
"loss": 0.1273,
"step": 1840
},
{
"epoch": 1.6313932980599648,
"grad_norm": 0.1892975495137155,
"learning_rate": 1.8145708534591377e-05,
"loss": 0.1255,
"step": 1850
},
{
"epoch": 1.6402116402116402,
"grad_norm": 0.16857751294190276,
"learning_rate": 1.79561585605303e-05,
"loss": 0.1283,
"step": 1860
},
{
"epoch": 1.6490299823633157,
"grad_norm": 0.1718737988430891,
"learning_rate": 1.7766793930683094e-05,
"loss": 0.1254,
"step": 1870
},
{
"epoch": 1.6578483245149913,
"grad_norm": 0.1588131771656951,
"learning_rate": 1.7577631817437888e-05,
"loss": 0.1244,
"step": 1880
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.16479791762147233,
"learning_rate": 1.7388689374817722e-05,
"loss": 0.1232,
"step": 1890
},
{
"epoch": 1.6754850088183422,
"grad_norm": 0.20327562061299326,
"learning_rate": 1.719998373692499e-05,
"loss": 0.1259,
"step": 1900
},
{
"epoch": 1.6843033509700176,
"grad_norm": 0.17445146984586163,
"learning_rate": 1.7011532016387637e-05,
"loss": 0.1241,
"step": 1910
},
{
"epoch": 1.693121693121693,
"grad_norm": 0.15172765768609214,
"learning_rate": 1.682335130280728e-05,
"loss": 0.1248,
"step": 1920
},
{
"epoch": 1.7019400352733687,
"grad_norm": 0.15540418406256226,
"learning_rate": 1.663545866120949e-05,
"loss": 0.1209,
"step": 1930
},
{
"epoch": 1.710758377425044,
"grad_norm": 0.1614644793216772,
"learning_rate": 1.6447871130496257e-05,
"loss": 0.1239,
"step": 1940
},
{
"epoch": 1.7195767195767195,
"grad_norm": 0.15504804793224192,
"learning_rate": 1.62606057219008e-05,
"loss": 0.1261,
"step": 1950
},
{
"epoch": 1.7283950617283952,
"grad_norm": 0.17006738174447467,
"learning_rate": 1.6073679417444946e-05,
"loss": 0.1266,
"step": 1960
},
{
"epoch": 1.7372134038800704,
"grad_norm": 0.17505387505969047,
"learning_rate": 1.588710916839914e-05,
"loss": 0.1225,
"step": 1970
},
{
"epoch": 1.746031746031746,
"grad_norm": 0.1612549181660387,
"learning_rate": 1.570091189374518e-05,
"loss": 0.1249,
"step": 1980
},
{
"epoch": 1.7548500881834215,
"grad_norm": 0.16112907861897668,
"learning_rate": 1.5515104478641975e-05,
"loss": 0.1254,
"step": 1990
},
{
"epoch": 1.763668430335097,
"grad_norm": 0.15922476014447312,
"learning_rate": 1.5329703772894327e-05,
"loss": 0.1236,
"step": 2000
},
{
"epoch": 1.7724867724867726,
"grad_norm": 0.17212069125892313,
"learning_rate": 1.51447265894249e-05,
"loss": 0.1242,
"step": 2010
},
{
"epoch": 1.781305114638448,
"grad_norm": 0.1740636826222426,
"learning_rate": 1.4960189702749551e-05,
"loss": 0.1245,
"step": 2020
},
{
"epoch": 1.7901234567901234,
"grad_norm": 0.15528067596347245,
"learning_rate": 1.4776109847456173e-05,
"loss": 0.1226,
"step": 2030
},
{
"epoch": 1.798941798941799,
"grad_norm": 0.15722708188146675,
"learning_rate": 1.4592503716687107e-05,
"loss": 0.1243,
"step": 2040
},
{
"epoch": 1.8077601410934743,
"grad_norm": 0.15918925574073986,
"learning_rate": 1.4409387960625331e-05,
"loss": 0.1216,
"step": 2050
},
{
"epoch": 1.81657848324515,
"grad_norm": 0.1576457624630644,
"learning_rate": 1.422677918498459e-05,
"loss": 0.1249,
"step": 2060
},
{
"epoch": 1.8253968253968254,
"grad_norm": 0.18618951180895943,
"learning_rate": 1.4044693949503465e-05,
"loss": 0.1251,
"step": 2070
},
{
"epoch": 1.8342151675485008,
"grad_norm": 0.15865872308397924,
"learning_rate": 1.3863148766443723e-05,
"loss": 0.1228,
"step": 2080
},
{
"epoch": 1.8430335097001764,
"grad_norm": 0.15045663896792358,
"learning_rate": 1.368216009909286e-05,
"loss": 0.1221,
"step": 2090
},
{
"epoch": 1.8518518518518519,
"grad_norm": 0.15818210790519696,
"learning_rate": 1.3501744360271177e-05,
"loss": 0.123,
"step": 2100
},
{
"epoch": 1.8606701940035273,
"grad_norm": 0.16422734107091674,
"learning_rate": 1.3321917910843394e-05,
"loss": 0.1187,
"step": 2110
},
{
"epoch": 1.869488536155203,
"grad_norm": 0.15177335025772304,
"learning_rate": 1.3142697058234952e-05,
"loss": 0.1222,
"step": 2120
},
{
"epoch": 1.8783068783068781,
"grad_norm": 0.15321330650093545,
"learning_rate": 1.2964098054953209e-05,
"loss": 0.1205,
"step": 2130
},
{
"epoch": 1.8871252204585538,
"grad_norm": 0.16140622128902943,
"learning_rate": 1.2786137097113594e-05,
"loss": 0.1194,
"step": 2140
},
{
"epoch": 1.8959435626102292,
"grad_norm": 0.15735771104485857,
"learning_rate": 1.2608830322970865e-05,
"loss": 0.1234,
"step": 2150
},
{
"epoch": 1.9047619047619047,
"grad_norm": 0.15949016923062945,
"learning_rate": 1.2432193811455609e-05,
"loss": 0.1237,
"step": 2160
},
{
"epoch": 1.9135802469135803,
"grad_norm": 0.16001139592539343,
"learning_rate": 1.2256243580716187e-05,
"loss": 0.1216,
"step": 2170
},
{
"epoch": 1.9223985890652557,
"grad_norm": 0.14752520035248345,
"learning_rate": 1.2080995586666085e-05,
"loss": 0.122,
"step": 2180
},
{
"epoch": 1.9312169312169312,
"grad_norm": 0.15247438606628488,
"learning_rate": 1.1906465721536984e-05,
"loss": 0.1236,
"step": 2190
},
{
"epoch": 1.9400352733686068,
"grad_norm": 0.15289569423666619,
"learning_rate": 1.17326698124376e-05,
"loss": 0.1214,
"step": 2200
},
{
"epoch": 1.948853615520282,
"grad_norm": 0.17526380738963238,
"learning_rate": 1.1559623619918396e-05,
"loss": 0.1212,
"step": 2210
},
{
"epoch": 1.9576719576719577,
"grad_norm": 0.16261170275972142,
"learning_rate": 1.1387342836542353e-05,
"loss": 0.1203,
"step": 2220
},
{
"epoch": 1.966490299823633,
"grad_norm": 0.16169651658483872,
"learning_rate": 1.1215843085461912e-05,
"loss": 0.1187,
"step": 2230
},
{
"epoch": 1.9753086419753085,
"grad_norm": 0.1595833660103317,
"learning_rate": 1.1045139919002181e-05,
"loss": 0.1185,
"step": 2240
},
{
"epoch": 1.9841269841269842,
"grad_norm": 0.15303519805668522,
"learning_rate": 1.087524881725059e-05,
"loss": 0.1223,
"step": 2250
},
{
"epoch": 1.9929453262786596,
"grad_norm": 0.16239859350306318,
"learning_rate": 1.0706185186653108e-05,
"loss": 0.1195,
"step": 2260
},
{
"epoch": 2.001763668430335,
"grad_norm": 0.1948721740831915,
"learning_rate": 1.0537964358617114e-05,
"loss": 0.1145,
"step": 2270
},
{
"epoch": 2.0105820105820107,
"grad_norm": 0.17072323405727977,
"learning_rate": 1.0370601588121052e-05,
"loss": 0.0971,
"step": 2280
},
{
"epoch": 2.019400352733686,
"grad_norm": 0.1730651246941294,
"learning_rate": 1.0204112052331097e-05,
"loss": 0.0983,
"step": 2290
},
{
"epoch": 2.0282186948853616,
"grad_norm": 0.15846505516821519,
"learning_rate": 1.0038510849224758e-05,
"loss": 0.097,
"step": 2300
},
{
"epoch": 2.037037037037037,
"grad_norm": 0.18557277607745912,
"learning_rate": 9.87381299622181e-06,
"loss": 0.0942,
"step": 2310
},
{
"epoch": 2.0458553791887124,
"grad_norm": 0.16362091724207117,
"learning_rate": 9.710033428822368e-06,
"loss": 0.0952,
"step": 2320
},
{
"epoch": 2.054673721340388,
"grad_norm": 0.16144197990009226,
"learning_rate": 9.547186999252538e-06,
"loss": 0.0966,
"step": 2330
},
{
"epoch": 2.0634920634920633,
"grad_norm": 0.1703392645828158,
"learning_rate": 9.385288475117526e-06,
"loss": 0.0947,
"step": 2340
},
{
"epoch": 2.072310405643739,
"grad_norm": 0.18472104722259497,
"learning_rate": 9.224352538062441e-06,
"loss": 0.0961,
"step": 2350
},
{
"epoch": 2.0811287477954146,
"grad_norm": 0.16657694584089722,
"learning_rate": 9.06439378244091e-06,
"loss": 0.0935,
"step": 2360
},
{
"epoch": 2.0899470899470898,
"grad_norm": 0.1640060501350356,
"learning_rate": 8.905426713991609e-06,
"loss": 0.098,
"step": 2370
},
{
"epoch": 2.0987654320987654,
"grad_norm": 0.155343849676478,
"learning_rate": 8.747465748522824e-06,
"loss": 0.0929,
"step": 2380
},
{
"epoch": 2.107583774250441,
"grad_norm": 0.16657876804825647,
"learning_rate": 8.590525210605125e-06,
"loss": 0.0938,
"step": 2390
},
{
"epoch": 2.1164021164021163,
"grad_norm": 0.1626875240950099,
"learning_rate": 8.434619332272412e-06,
"loss": 0.0933,
"step": 2400
},
{
"epoch": 2.125220458553792,
"grad_norm": 0.1613598792215022,
"learning_rate": 8.279762251731246e-06,
"loss": 0.0944,
"step": 2410
},
{
"epoch": 2.1340388007054676,
"grad_norm": 0.1638238651603253,
"learning_rate": 8.125968012078745e-06,
"loss": 0.092,
"step": 2420
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.16977220285542402,
"learning_rate": 7.973250560029132e-06,
"loss": 0.0939,
"step": 2430
},
{
"epoch": 2.1516754850088184,
"grad_norm": 0.16753471017264823,
"learning_rate": 7.821623744648952e-06,
"loss": 0.0917,
"step": 2440
},
{
"epoch": 2.1604938271604937,
"grad_norm": 0.16181368054619877,
"learning_rate": 7.671101316101172e-06,
"loss": 0.0942,
"step": 2450
},
{
"epoch": 2.1693121693121693,
"grad_norm": 0.16568414134323833,
"learning_rate": 7.521696924398303e-06,
"loss": 0.0947,
"step": 2460
},
{
"epoch": 2.178130511463845,
"grad_norm": 0.15925917816119767,
"learning_rate": 7.373424118164501e-06,
"loss": 0.0944,
"step": 2470
},
{
"epoch": 2.18694885361552,
"grad_norm": 0.16349944720912132,
"learning_rate": 7.226296343406974e-06,
"loss": 0.0941,
"step": 2480
},
{
"epoch": 2.195767195767196,
"grad_norm": 0.16785075128125135,
"learning_rate": 7.0803269422966135e-06,
"loss": 0.0922,
"step": 2490
},
{
"epoch": 2.204585537918871,
"grad_norm": 0.15912892496071598,
"learning_rate": 6.935529151958065e-06,
"loss": 0.0948,
"step": 2500
},
{
"epoch": 2.2134038800705467,
"grad_norm": 0.16142401500311457,
"learning_rate": 6.791916103269333e-06,
"loss": 0.0932,
"step": 2510
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.1664762727497597,
"learning_rate": 6.649500819671047e-06,
"loss": 0.0955,
"step": 2520
},
{
"epoch": 2.2310405643738975,
"grad_norm": 0.16004719129428444,
"learning_rate": 6.5082962159853834e-06,
"loss": 0.0898,
"step": 2530
},
{
"epoch": 2.239858906525573,
"grad_norm": 0.1615455245606165,
"learning_rate": 6.368315097244955e-06,
"loss": 0.0917,
"step": 2540
},
{
"epoch": 2.248677248677249,
"grad_norm": 0.16743450359900008,
"learning_rate": 6.2295701575315594e-06,
"loss": 0.0916,
"step": 2550
},
{
"epoch": 2.257495590828924,
"grad_norm": 0.16544121841662957,
"learning_rate": 6.092073978825022e-06,
"loss": 0.0927,
"step": 2560
},
{
"epoch": 2.2663139329805997,
"grad_norm": 0.16180251542385612,
"learning_rate": 5.955839029862245e-06,
"loss": 0.0946,
"step": 2570
},
{
"epoch": 2.2751322751322753,
"grad_norm": 0.15338708773074522,
"learning_rate": 5.820877665006441e-06,
"loss": 0.0926,
"step": 2580
},
{
"epoch": 2.2839506172839505,
"grad_norm": 0.15957923017749945,
"learning_rate": 5.687202123126832e-06,
"loss": 0.0952,
"step": 2590
},
{
"epoch": 2.292768959435626,
"grad_norm": 0.15484396525882926,
"learning_rate": 5.5548245264887515e-06,
"loss": 0.0929,
"step": 2600
},
{
"epoch": 2.3015873015873014,
"grad_norm": 0.15944927489262736,
"learning_rate": 5.423756879654343e-06,
"loss": 0.092,
"step": 2610
},
{
"epoch": 2.310405643738977,
"grad_norm": 0.1555885456859465,
"learning_rate": 5.294011068393945e-06,
"loss": 0.0932,
"step": 2620
},
{
"epoch": 2.3192239858906527,
"grad_norm": 0.1616989709098896,
"learning_rate": 5.1655988586082535e-06,
"loss": 0.0944,
"step": 2630
},
{
"epoch": 2.328042328042328,
"grad_norm": 0.15595636612871588,
"learning_rate": 5.038531895261301e-06,
"loss": 0.094,
"step": 2640
},
{
"epoch": 2.3368606701940036,
"grad_norm": 0.1773185191439187,
"learning_rate": 4.912821701324479e-06,
"loss": 0.0924,
"step": 2650
},
{
"epoch": 2.3456790123456788,
"grad_norm": 0.16579054625602957,
"learning_rate": 4.788479676731581e-06,
"loss": 0.0936,
"step": 2660
},
{
"epoch": 2.3544973544973544,
"grad_norm": 0.15352554844705696,
"learning_rate": 4.665517097344976e-06,
"loss": 0.0935,
"step": 2670
},
{
"epoch": 2.36331569664903,
"grad_norm": 0.15451348902863254,
"learning_rate": 4.543945113933099e-06,
"loss": 0.0931,
"step": 2680
},
{
"epoch": 2.3721340388007053,
"grad_norm": 0.16299047928042187,
"learning_rate": 4.423774751159247e-06,
"loss": 0.0937,
"step": 2690
},
{
"epoch": 2.380952380952381,
"grad_norm": 0.15558927808811412,
"learning_rate": 4.305016906581811e-06,
"loss": 0.0906,
"step": 2700
},
{
"epoch": 2.3897707231040566,
"grad_norm": 0.15763428447923092,
"learning_rate": 4.1876823496660245e-06,
"loss": 0.0919,
"step": 2710
},
{
"epoch": 2.398589065255732,
"grad_norm": 0.1614586373848465,
"learning_rate": 4.071781720807375e-06,
"loss": 0.0929,
"step": 2720
},
{
"epoch": 2.4074074074074074,
"grad_norm": 0.15663328679730767,
"learning_rate": 3.9573255303666554e-06,
"loss": 0.0911,
"step": 2730
},
{
"epoch": 2.416225749559083,
"grad_norm": 0.15190824093071106,
"learning_rate": 3.844324157716857e-06,
"loss": 0.0906,
"step": 2740
},
{
"epoch": 2.4250440917107583,
"grad_norm": 0.15885908725225142,
"learning_rate": 3.7327878503019243e-06,
"loss": 0.0903,
"step": 2750
},
{
"epoch": 2.433862433862434,
"grad_norm": 0.15186092891507827,
"learning_rate": 3.622726722707479e-06,
"loss": 0.09,
"step": 2760
},
{
"epoch": 2.442680776014109,
"grad_norm": 0.15364028795008292,
"learning_rate": 3.5141507557435504e-06,
"loss": 0.093,
"step": 2770
},
{
"epoch": 2.451499118165785,
"grad_norm": 0.15589229219750106,
"learning_rate": 3.4070697955395326e-06,
"loss": 0.0897,
"step": 2780
},
{
"epoch": 2.4603174603174605,
"grad_norm": 0.15718297314700777,
"learning_rate": 3.3014935526512383e-06,
"loss": 0.0922,
"step": 2790
},
{
"epoch": 2.4691358024691357,
"grad_norm": 0.15723729478788426,
"learning_rate": 3.197431601180343e-06,
"loss": 0.0924,
"step": 2800
},
{
"epoch": 2.4779541446208113,
"grad_norm": 0.1600488009457486,
"learning_rate": 3.094893377906143e-06,
"loss": 0.0897,
"step": 2810
},
{
"epoch": 2.4867724867724865,
"grad_norm": 0.1545448158355294,
"learning_rate": 2.9938881814298002e-06,
"loss": 0.0901,
"step": 2820
},
{
"epoch": 2.495590828924162,
"grad_norm": 0.15517515088748454,
"learning_rate": 2.894425171331112e-06,
"loss": 0.0918,
"step": 2830
},
{
"epoch": 2.504409171075838,
"grad_norm": 0.15614342046875468,
"learning_rate": 2.796513367337854e-06,
"loss": 0.0883,
"step": 2840
},
{
"epoch": 2.5132275132275135,
"grad_norm": 0.1532975772545578,
"learning_rate": 2.7001616485078553e-06,
"loss": 0.0937,
"step": 2850
},
{
"epoch": 2.5220458553791887,
"grad_norm": 0.15407944254916026,
"learning_rate": 2.6053787524238084e-06,
"loss": 0.0881,
"step": 2860
},
{
"epoch": 2.5308641975308643,
"grad_norm": 0.15187561548545364,
"learning_rate": 2.512173274400904e-06,
"loss": 0.0883,
"step": 2870
},
{
"epoch": 2.5396825396825395,
"grad_norm": 0.15129893026103466,
"learning_rate": 2.4205536667073572e-06,
"loss": 0.0914,
"step": 2880
},
{
"epoch": 2.548500881834215,
"grad_norm": 0.15387910729749937,
"learning_rate": 2.330528237797949e-06,
"loss": 0.0916,
"step": 2890
},
{
"epoch": 2.557319223985891,
"grad_norm": 0.15310039839687165,
"learning_rate": 2.242105151560554e-06,
"loss": 0.0897,
"step": 2900
},
{
"epoch": 2.566137566137566,
"grad_norm": 0.15660569630223034,
"learning_rate": 2.1552924265758102e-06,
"loss": 0.0883,
"step": 2910
},
{
"epoch": 2.5749559082892417,
"grad_norm": 0.15625668536136728,
"learning_rate": 2.070097935389974e-06,
"loss": 0.088,
"step": 2920
},
{
"epoch": 2.583774250440917,
"grad_norm": 0.16440666779797036,
"learning_rate": 1.9865294038009984e-06,
"loss": 0.09,
"step": 2930
},
{
"epoch": 2.5925925925925926,
"grad_norm": 0.15497386680361938,
"learning_rate": 1.9045944101579073e-06,
"loss": 0.0896,
"step": 2940
},
{
"epoch": 2.601410934744268,
"grad_norm": 0.1528557591079046,
"learning_rate": 1.8243003846735807e-06,
"loss": 0.0916,
"step": 2950
},
{
"epoch": 2.6102292768959434,
"grad_norm": 0.14797025757103924,
"learning_rate": 1.7456546087509419e-06,
"loss": 0.0882,
"step": 2960
},
{
"epoch": 2.619047619047619,
"grad_norm": 0.15101812244580526,
"learning_rate": 1.6686642143226484e-06,
"loss": 0.0892,
"step": 2970
},
{
"epoch": 2.6278659611992943,
"grad_norm": 0.14853644829002635,
"learning_rate": 1.593336183204346e-06,
"loss": 0.0884,
"step": 2980
},
{
"epoch": 2.63668430335097,
"grad_norm": 0.1547300718526592,
"learning_rate": 1.5196773464615167e-06,
"loss": 0.0902,
"step": 2990
},
{
"epoch": 2.6455026455026456,
"grad_norm": 0.14824004323134074,
"learning_rate": 1.4476943837900105e-06,
"loss": 0.0915,
"step": 3000
},
{
"epoch": 2.6543209876543212,
"grad_norm": 0.15162108864434684,
"learning_rate": 1.377393822910318e-06,
"loss": 0.0904,
"step": 3010
},
{
"epoch": 2.6631393298059964,
"grad_norm": 0.1539793202148526,
"learning_rate": 1.30878203897558e-06,
"loss": 0.0883,
"step": 3020
},
{
"epoch": 2.671957671957672,
"grad_norm": 0.15086750725756884,
"learning_rate": 1.241865253993495e-06,
"loss": 0.0899,
"step": 3030
},
{
"epoch": 2.6807760141093473,
"grad_norm": 0.1555053078276991,
"learning_rate": 1.1766495362620555e-06,
"loss": 0.09,
"step": 3040
},
{
"epoch": 2.689594356261023,
"grad_norm": 0.15052416992823459,
"learning_rate": 1.1131407998192567e-06,
"loss": 0.089,
"step": 3050
},
{
"epoch": 2.6984126984126986,
"grad_norm": 0.1504945657873999,
"learning_rate": 1.0513448039067954e-06,
"loss": 0.0892,
"step": 3060
},
{
"epoch": 2.707231040564374,
"grad_norm": 0.1478376395173767,
"learning_rate": 9.91267152447779e-07,
"loss": 0.0878,
"step": 3070
},
{
"epoch": 2.7160493827160495,
"grad_norm": 0.1509329096731571,
"learning_rate": 9.329132935385577e-07,
"loss": 0.0899,
"step": 3080
},
{
"epoch": 2.7248677248677247,
"grad_norm": 0.15222497781820243,
"learning_rate": 8.7628851895466e-07,
"loss": 0.0916,
"step": 3090
},
{
"epoch": 2.7336860670194003,
"grad_norm": 0.15030672400304343,
"learning_rate": 8.213979636709046e-07,
"loss": 0.0922,
"step": 3100
},
{
"epoch": 2.742504409171076,
"grad_norm": 0.15512035164112659,
"learning_rate": 7.682466053957438e-07,
"loss": 0.0888,
"step": 3110
},
{
"epoch": 2.751322751322751,
"grad_norm": 0.14852112467156292,
"learning_rate": 7.168392641198796e-07,
"loss": 0.0869,
"step": 3120
},
{
"epoch": 2.760141093474427,
"grad_norm": 0.15336396025005739,
"learning_rate": 6.671806016791337e-07,
"loss": 0.0877,
"step": 3130
},
{
"epoch": 2.768959435626102,
"grad_norm": 0.1503484462634625,
"learning_rate": 6.192751213317261e-07,
"loss": 0.0869,
"step": 3140
},
{
"epoch": 2.7777777777777777,
"grad_norm": 0.15301298135075012,
"learning_rate": 5.731271673498807e-07,
"loss": 0.0876,
"step": 3150
},
{
"epoch": 2.7865961199294533,
"grad_norm": 0.14946491260070807,
"learning_rate": 5.287409246258724e-07,
"loss": 0.0867,
"step": 3160
},
{
"epoch": 2.795414462081129,
"grad_norm": 0.15544234491849743,
"learning_rate": 4.861204182925238e-07,
"loss": 0.0861,
"step": 3170
},
{
"epoch": 2.804232804232804,
"grad_norm": 0.14786323558106976,
"learning_rate": 4.452695133581908e-07,
"loss": 0.0897,
"step": 3180
},
{
"epoch": 2.81305114638448,
"grad_norm": 0.15035294545385944,
"learning_rate": 4.0619191435626647e-07,
"loss": 0.0898,
"step": 3190
},
{
"epoch": 2.821869488536155,
"grad_norm": 0.14702281322330948,
"learning_rate": 3.688911650092375e-07,
"loss": 0.0892,
"step": 3200
},
{
"epoch": 2.8306878306878307,
"grad_norm": 0.1490187614425559,
"learning_rate": 3.333706479073229e-07,
"loss": 0.0904,
"step": 3210
},
{
"epoch": 2.8395061728395063,
"grad_norm": 0.14599915262723653,
"learning_rate": 2.996335842017306e-07,
"loss": 0.0915,
"step": 3220
},
{
"epoch": 2.8483245149911816,
"grad_norm": 0.1472537352263344,
"learning_rate": 2.676830333125424e-07,
"loss": 0.089,
"step": 3230
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.1444247131200575,
"learning_rate": 2.3752189265128234e-07,
"loss": 0.0868,
"step": 3240
},
{
"epoch": 2.8659611992945324,
"grad_norm": 0.15100512121725188,
"learning_rate": 2.0915289735816247e-07,
"loss": 0.0891,
"step": 3250
},
{
"epoch": 2.874779541446208,
"grad_norm": 0.14909989384814895,
"learning_rate": 1.8257862005404802e-07,
"loss": 0.0898,
"step": 3260
},
{
"epoch": 2.8835978835978837,
"grad_norm": 0.1501123498119351,
"learning_rate": 1.5780147060715955e-07,
"loss": 0.0877,
"step": 3270
},
{
"epoch": 2.892416225749559,
"grad_norm": 0.14724312312335958,
"learning_rate": 1.3482369591453882e-07,
"loss": 0.0911,
"step": 3280
},
{
"epoch": 2.9012345679012346,
"grad_norm": 0.14800693250740457,
"learning_rate": 1.1364737969829176e-07,
"loss": 0.0909,
"step": 3290
},
{
"epoch": 2.91005291005291,
"grad_norm": 0.14676977153334295,
"learning_rate": 9.427444231662863e-08,
"loss": 0.0862,
"step": 3300
},
{
"epoch": 2.9188712522045854,
"grad_norm": 0.14812710257168704,
"learning_rate": 7.670664058970545e-08,
"loss": 0.0875,
"step": 3310
},
{
"epoch": 2.927689594356261,
"grad_norm": 0.1488187956001474,
"learning_rate": 6.094556764032922e-08,
"loss": 0.0886,
"step": 3320
},
{
"epoch": 2.9365079365079367,
"grad_norm": 0.14589793378350213,
"learning_rate": 4.699265274946907e-08,
"loss": 0.0865,
"step": 3330
},
{
"epoch": 2.945326278659612,
"grad_norm": 0.14997064517034403,
"learning_rate": 3.484916122664883e-08,
"loss": 0.0889,
"step": 3340
},
{
"epoch": 2.9541446208112876,
"grad_norm": 0.14722441270349446,
"learning_rate": 2.4516194295205466e-08,
"loss": 0.0899,
"step": 3350
},
{
"epoch": 2.962962962962963,
"grad_norm": 0.14896127171056997,
"learning_rate": 1.5994688992424513e-08,
"loss": 0.0895,
"step": 3360
},
{
"epoch": 2.9717813051146384,
"grad_norm": 0.14761644929430634,
"learning_rate": 9.285418084565845e-09,
"loss": 0.0886,
"step": 3370
},
{
"epoch": 2.980599647266314,
"grad_norm": 0.14489081475734752,
"learning_rate": 4.3889899967797244e-09,
"loss": 0.0886,
"step": 3380
},
{
"epoch": 2.9894179894179893,
"grad_norm": 0.1482366815617573,
"learning_rate": 1.3058487579420409e-09,
"loss": 0.0888,
"step": 3390
},
{
"epoch": 2.998236331569665,
"grad_norm": 0.15214422750713533,
"learning_rate": 3.627396038208275e-11,
"loss": 0.0869,
"step": 3400
},
{
"epoch": 3.0,
"step": 3402,
"total_flos": 7122048987955200.0,
"train_loss": 0.13241534099762473,
"train_runtime": 27380.0053,
"train_samples_per_second": 19.876,
"train_steps_per_second": 0.124
}
],
"logging_steps": 10,
"max_steps": 3402,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 7122048987955200.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}