{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.982343499197432,
"eval_steps": 500,
"global_step": 385,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.012841091492776886,
"grad_norm": 0.40566644072532654,
"learning_rate": 1.6666666666666667e-05,
"loss": 0.2239,
"step": 1
},
{
"epoch": 0.025682182985553772,
"grad_norm": 0.5796589851379395,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.2932,
"step": 2
},
{
"epoch": 0.038523274478330656,
"grad_norm": 0.620160698890686,
"learning_rate": 5e-05,
"loss": 0.3059,
"step": 3
},
{
"epoch": 0.051364365971107544,
"grad_norm": 0.4742761254310608,
"learning_rate": 6.666666666666667e-05,
"loss": 0.2751,
"step": 4
},
{
"epoch": 0.06420545746388442,
"grad_norm": 0.39537593722343445,
"learning_rate": 8.333333333333334e-05,
"loss": 0.2436,
"step": 5
},
{
"epoch": 0.07704654895666131,
"grad_norm": 0.33797481656074524,
"learning_rate": 0.0001,
"loss": 0.2592,
"step": 6
},
{
"epoch": 0.0898876404494382,
"grad_norm": 0.3221365511417389,
"learning_rate": 0.00011666666666666668,
"loss": 0.2416,
"step": 7
},
{
"epoch": 0.10272873194221509,
"grad_norm": 0.28052330017089844,
"learning_rate": 0.00013333333333333334,
"loss": 0.2092,
"step": 8
},
{
"epoch": 0.11556982343499198,
"grad_norm": 0.29556986689567566,
"learning_rate": 0.00015000000000000001,
"loss": 0.2056,
"step": 9
},
{
"epoch": 0.12841091492776885,
"grad_norm": 0.30732303857803345,
"learning_rate": 0.0001666666666666667,
"loss": 0.1879,
"step": 10
},
{
"epoch": 0.14125200642054575,
"grad_norm": 0.30247950553894043,
"learning_rate": 0.00018333333333333334,
"loss": 0.1839,
"step": 11
},
{
"epoch": 0.15409309791332262,
"grad_norm": 0.3433546721935272,
"learning_rate": 0.0002,
"loss": 0.1908,
"step": 12
},
{
"epoch": 0.16693418940609953,
"grad_norm": 0.19485895335674286,
"learning_rate": 0.00019999645309530845,
"loss": 0.1743,
"step": 13
},
{
"epoch": 0.1797752808988764,
"grad_norm": 0.18382272124290466,
"learning_rate": 0.00019998581263284444,
"loss": 0.1843,
"step": 14
},
{
"epoch": 0.1926163723916533,
"grad_norm": 0.1688140630722046,
"learning_rate": 0.00019996807936742208,
"loss": 0.1765,
"step": 15
},
{
"epoch": 0.20545746388443017,
"grad_norm": 0.1830463707447052,
"learning_rate": 0.00019994325455700542,
"loss": 0.1922,
"step": 16
},
{
"epoch": 0.21829855537720708,
"grad_norm": 0.22317859530448914,
"learning_rate": 0.00019991133996261922,
"loss": 0.2321,
"step": 17
},
{
"epoch": 0.23113964686998395,
"grad_norm": 0.17708703875541687,
"learning_rate": 0.00019987233784822395,
"loss": 0.1965,
"step": 18
},
{
"epoch": 0.24398073836276082,
"grad_norm": 0.18069303035736084,
"learning_rate": 0.00019982625098055525,
"loss": 0.1589,
"step": 19
},
{
"epoch": 0.2568218298555377,
"grad_norm": 0.2211710810661316,
"learning_rate": 0.0001997730826289277,
"loss": 0.1806,
"step": 20
},
{
"epoch": 0.2696629213483146,
"grad_norm": 0.2257445603609085,
"learning_rate": 0.00019971283656500276,
"loss": 0.1807,
"step": 21
},
{
"epoch": 0.2825040128410915,
"grad_norm": 0.22210289537906647,
"learning_rate": 0.00019964551706252144,
"loss": 0.1869,
"step": 22
},
{
"epoch": 0.2953451043338684,
"grad_norm": 0.19516178965568542,
"learning_rate": 0.00019957112889700085,
"loss": 0.1646,
"step": 23
},
{
"epoch": 0.30818619582664525,
"grad_norm": 0.22516900300979614,
"learning_rate": 0.00019948967734539571,
"loss": 0.164,
"step": 24
},
{
"epoch": 0.32102728731942215,
"grad_norm": 0.2650158107280731,
"learning_rate": 0.0001994011681857238,
"loss": 0.1733,
"step": 25
},
{
"epoch": 0.33386837881219905,
"grad_norm": 0.16395625472068787,
"learning_rate": 0.00019930560769665617,
"loss": 0.1448,
"step": 26
},
{
"epoch": 0.3467094703049759,
"grad_norm": 0.21180006861686707,
"learning_rate": 0.00019920300265707184,
"loss": 0.1933,
"step": 27
},
{
"epoch": 0.3595505617977528,
"grad_norm": 0.17538852989673615,
"learning_rate": 0.0001990933603455767,
"loss": 0.1767,
"step": 28
},
{
"epoch": 0.3723916532905297,
"grad_norm": 0.19017687439918518,
"learning_rate": 0.00019897668853998725,
"loss": 0.2061,
"step": 29
},
{
"epoch": 0.3852327447833066,
"grad_norm": 0.1627688705921173,
"learning_rate": 0.0001988529955167791,
"loss": 0.165,
"step": 30
},
{
"epoch": 0.39807383627608345,
"grad_norm": 0.23488560318946838,
"learning_rate": 0.00019872229005049946,
"loss": 0.1807,
"step": 31
},
{
"epoch": 0.41091492776886035,
"grad_norm": 0.16365641355514526,
"learning_rate": 0.00019858458141314503,
"loss": 0.1606,
"step": 32
},
{
"epoch": 0.42375601926163725,
"grad_norm": 0.1977553814649582,
"learning_rate": 0.00019843987937350396,
"loss": 0.1885,
"step": 33
},
{
"epoch": 0.43659711075441415,
"grad_norm": 0.24794891476631165,
"learning_rate": 0.00019828819419646316,
"loss": 0.1695,
"step": 34
},
{
"epoch": 0.449438202247191,
"grad_norm": 0.19939230382442474,
"learning_rate": 0.00019812953664228,
"loss": 0.1516,
"step": 35
},
{
"epoch": 0.4622792937399679,
"grad_norm": 0.2248964011669159,
"learning_rate": 0.00019796391796581887,
"loss": 0.1509,
"step": 36
},
{
"epoch": 0.4751203852327448,
"grad_norm": 0.2671429514884949,
"learning_rate": 0.000197791349915753,
"loss": 0.1936,
"step": 37
},
{
"epoch": 0.48796147672552165,
"grad_norm": 0.1820429414510727,
"learning_rate": 0.00019761184473373095,
"loss": 0.1297,
"step": 38
},
{
"epoch": 0.5008025682182986,
"grad_norm": 0.15517914295196533,
"learning_rate": 0.00019742541515350813,
"loss": 0.1664,
"step": 39
},
{
"epoch": 0.5136436597110754,
"grad_norm": 0.1475019007921219,
"learning_rate": 0.00019723207440004362,
"loss": 0.1606,
"step": 40
},
{
"epoch": 0.5264847512038523,
"grad_norm": 0.1478549689054489,
"learning_rate": 0.0001970318361885619,
"loss": 0.1629,
"step": 41
},
{
"epoch": 0.5393258426966292,
"grad_norm": 0.1522587686777115,
"learning_rate": 0.00019682471472358003,
"loss": 0.1854,
"step": 42
},
{
"epoch": 0.5521669341894061,
"grad_norm": 0.16658446192741394,
"learning_rate": 0.00019661072469789992,
"loss": 0.16,
"step": 43
},
{
"epoch": 0.565008025682183,
"grad_norm": 0.1563337743282318,
"learning_rate": 0.0001963898812915661,
"loss": 0.1646,
"step": 44
},
{
"epoch": 0.5778491171749599,
"grad_norm": 0.18028098344802856,
"learning_rate": 0.00019616220017078882,
"loss": 0.1679,
"step": 45
},
{
"epoch": 0.5906902086677368,
"grad_norm": 0.17881852388381958,
"learning_rate": 0.00019592769748683287,
"loss": 0.1567,
"step": 46
},
{
"epoch": 0.6035313001605136,
"grad_norm": 0.18338599801063538,
"learning_rate": 0.00019568638987487155,
"loss": 0.1475,
"step": 47
},
{
"epoch": 0.6163723916532905,
"grad_norm": 0.19950391352176666,
"learning_rate": 0.0001954382944528069,
"loss": 0.1604,
"step": 48
},
{
"epoch": 0.6292134831460674,
"grad_norm": 0.2285371720790863,
"learning_rate": 0.00019518342882005532,
"loss": 0.1463,
"step": 49
},
{
"epoch": 0.6420545746388443,
"grad_norm": 0.26193344593048096,
"learning_rate": 0.00019492181105629886,
"loss": 0.1649,
"step": 50
},
{
"epoch": 0.6548956661316212,
"grad_norm": 0.14862482249736786,
"learning_rate": 0.00019465345972020313,
"loss": 0.1303,
"step": 51
},
{
"epoch": 0.6677367576243981,
"grad_norm": 0.14517804980278015,
"learning_rate": 0.00019437839384810028,
"loss": 0.161,
"step": 52
},
{
"epoch": 0.680577849117175,
"grad_norm": 0.14767995476722717,
"learning_rate": 0.00019409663295263902,
"loss": 0.162,
"step": 53
},
{
"epoch": 0.6934189406099518,
"grad_norm": 0.15824687480926514,
"learning_rate": 0.00019380819702140016,
"loss": 0.1555,
"step": 54
},
{
"epoch": 0.7062600321027287,
"grad_norm": 0.1738673746585846,
"learning_rate": 0.00019351310651547884,
"loss": 0.1807,
"step": 55
},
{
"epoch": 0.7191011235955056,
"grad_norm": 0.1739288568496704,
"learning_rate": 0.00019321138236803311,
"loss": 0.1824,
"step": 56
},
{
"epoch": 0.7319422150882825,
"grad_norm": 0.1934700608253479,
"learning_rate": 0.0001929030459827988,
"loss": 0.1986,
"step": 57
},
{
"epoch": 0.7447833065810594,
"grad_norm": 0.18333880603313446,
"learning_rate": 0.00019258811923257137,
"loss": 0.1798,
"step": 58
},
{
"epoch": 0.7576243980738363,
"grad_norm": 0.17110952734947205,
"learning_rate": 0.00019226662445765417,
"loss": 0.1279,
"step": 59
},
{
"epoch": 0.7704654895666132,
"grad_norm": 0.18288837373256683,
"learning_rate": 0.0001919385844642737,
"loss": 0.1372,
"step": 60
},
{
"epoch": 0.78330658105939,
"grad_norm": 0.21783368289470673,
"learning_rate": 0.0001916040225229618,
"loss": 0.1669,
"step": 61
},
{
"epoch": 0.7961476725521669,
"grad_norm": 0.24256236851215363,
"learning_rate": 0.00019126296236690485,
"loss": 0.1504,
"step": 62
},
{
"epoch": 0.8089887640449438,
"grad_norm": 0.16231147944927216,
"learning_rate": 0.00019091542819026024,
"loss": 0.1243,
"step": 63
},
{
"epoch": 0.8218298555377207,
"grad_norm": 0.14093080163002014,
"learning_rate": 0.0001905614446464399,
"loss": 0.1547,
"step": 64
},
{
"epoch": 0.8346709470304976,
"grad_norm": 0.1700276881456375,
"learning_rate": 0.00019020103684636177,
"loss": 0.1762,
"step": 65
},
{
"epoch": 0.8475120385232745,
"grad_norm": 0.15964765846729279,
"learning_rate": 0.00018983423035666817,
"loss": 0.1593,
"step": 66
},
{
"epoch": 0.8603531300160514,
"grad_norm": 0.14215022325515747,
"learning_rate": 0.0001894610511979123,
"loss": 0.1454,
"step": 67
},
{
"epoch": 0.8731942215088283,
"grad_norm": 0.1703556478023529,
"learning_rate": 0.00018908152584271227,
"loss": 0.1681,
"step": 68
},
{
"epoch": 0.8860353130016051,
"grad_norm": 0.15497173368930817,
"learning_rate": 0.00018869568121387343,
"loss": 0.1503,
"step": 69
},
{
"epoch": 0.898876404494382,
"grad_norm": 0.15470866858959198,
"learning_rate": 0.00018830354468247817,
"loss": 0.1417,
"step": 70
},
{
"epoch": 0.9117174959871589,
"grad_norm": 0.17999379336833954,
"learning_rate": 0.00018790514406594465,
"loss": 0.1601,
"step": 71
},
{
"epoch": 0.9245585874799358,
"grad_norm": 0.15672680735588074,
"learning_rate": 0.00018750050762605312,
"loss": 0.1282,
"step": 72
},
{
"epoch": 0.9373996789727127,
"grad_norm": 0.21854081749916077,
"learning_rate": 0.0001870896640669413,
"loss": 0.1523,
"step": 73
},
{
"epoch": 0.9502407704654896,
"grad_norm": 0.19732460379600525,
"learning_rate": 0.00018667264253306823,
"loss": 0.1554,
"step": 74
},
{
"epoch": 0.9630818619582665,
"grad_norm": 0.24896161258220673,
"learning_rate": 0.00018624947260714652,
"loss": 0.1747,
"step": 75
},
{
"epoch": 0.9759229534510433,
"grad_norm": 0.13258929550647736,
"learning_rate": 0.0001858201843080441,
"loss": 0.1484,
"step": 76
},
{
"epoch": 0.9887640449438202,
"grad_norm": 0.17648810148239136,
"learning_rate": 0.00018538480808865464,
"loss": 0.1701,
"step": 77
},
{
"epoch": 1.0112359550561798,
"grad_norm": 0.5716731548309326,
"learning_rate": 0.00018494337483373726,
"loss": 0.248,
"step": 78
},
{
"epoch": 1.0240770465489566,
"grad_norm": 0.21971853077411652,
"learning_rate": 0.00018449591585772553,
"loss": 0.1502,
"step": 79
},
{
"epoch": 1.0369181380417336,
"grad_norm": 0.20206806063652039,
"learning_rate": 0.00018404246290250638,
"loss": 0.1179,
"step": 80
},
{
"epoch": 1.0497592295345104,
"grad_norm": 0.4210855960845947,
"learning_rate": 0.000183583048135168,
"loss": 0.1414,
"step": 81
},
{
"epoch": 1.0626003210272874,
"grad_norm": 0.2643897831439972,
"learning_rate": 0.00018311770414571835,
"loss": 0.1228,
"step": 82
},
{
"epoch": 1.0754414125200642,
"grad_norm": 0.27934709191322327,
"learning_rate": 0.000182646463944773,
"loss": 0.1244,
"step": 83
},
{
"epoch": 1.088282504012841,
"grad_norm": 0.2761859893798828,
"learning_rate": 0.00018216936096121348,
"loss": 0.1373,
"step": 84
},
{
"epoch": 1.101123595505618,
"grad_norm": 0.3699813187122345,
"learning_rate": 0.00018168642903981607,
"loss": 0.123,
"step": 85
},
{
"epoch": 1.1139646869983948,
"grad_norm": 0.38410046696662903,
"learning_rate": 0.00018119770243885065,
"loss": 0.126,
"step": 86
},
{
"epoch": 1.1268057784911718,
"grad_norm": 0.36278554797172546,
"learning_rate": 0.0001807032158276508,
"loss": 0.1081,
"step": 87
},
{
"epoch": 1.1396468699839486,
"grad_norm": 0.4462052583694458,
"learning_rate": 0.00018020300428415407,
"loss": 0.1134,
"step": 88
},
{
"epoch": 1.1524879614767256,
"grad_norm": 0.4787779152393341,
"learning_rate": 0.00017969710329241386,
"loss": 0.1165,
"step": 89
},
{
"epoch": 1.1653290529695024,
"grad_norm": 0.34396493434906006,
"learning_rate": 0.00017918554874008226,
"loss": 0.1239,
"step": 90
},
{
"epoch": 1.1781701444622792,
"grad_norm": 0.2252732515335083,
"learning_rate": 0.00017866837691586404,
"loss": 0.1598,
"step": 91
},
{
"epoch": 1.1910112359550562,
"grad_norm": 0.28776365518569946,
"learning_rate": 0.00017814562450694266,
"loss": 0.1315,
"step": 92
},
{
"epoch": 1.203852327447833,
"grad_norm": 0.2533372938632965,
"learning_rate": 0.00017761732859637746,
"loss": 0.1319,
"step": 93
},
{
"epoch": 1.21669341894061,
"grad_norm": 0.258912593126297,
"learning_rate": 0.0001770835266604734,
"loss": 0.1492,
"step": 94
},
{
"epoch": 1.2295345104333868,
"grad_norm": 0.2596625089645386,
"learning_rate": 0.0001765442565661222,
"loss": 0.1362,
"step": 95
},
{
"epoch": 1.2423756019261638,
"grad_norm": 0.3291197717189789,
"learning_rate": 0.00017599955656811653,
"loss": 0.1325,
"step": 96
},
{
"epoch": 1.2552166934189406,
"grad_norm": 0.3218838572502136,
"learning_rate": 0.0001754494653064359,
"loss": 0.1297,
"step": 97
},
{
"epoch": 1.2680577849117176,
"grad_norm": 0.4448782801628113,
"learning_rate": 0.00017489402180350582,
"loss": 0.1291,
"step": 98
},
{
"epoch": 1.2808988764044944,
"grad_norm": 0.33252349495887756,
"learning_rate": 0.00017433326546142968,
"loss": 0.1123,
"step": 99
},
{
"epoch": 1.2937399678972712,
"grad_norm": 0.4413713216781616,
"learning_rate": 0.00017376723605919345,
"loss": 0.124,
"step": 100
},
{
"epoch": 1.3065810593900482,
"grad_norm": 0.407705157995224,
"learning_rate": 0.00017319597374984395,
"loss": 0.1194,
"step": 101
},
{
"epoch": 1.319422150882825,
"grad_norm": 0.4282243251800537,
"learning_rate": 0.00017261951905764056,
"loss": 0.1281,
"step": 102
},
{
"epoch": 1.332263242375602,
"grad_norm": 0.30847808718681335,
"learning_rate": 0.00017203791287518028,
"loss": 0.127,
"step": 103
},
{
"epoch": 1.3451043338683788,
"grad_norm": 0.3466252386569977,
"learning_rate": 0.00017145119646049705,
"loss": 0.1737,
"step": 104
},
{
"epoch": 1.3579454253611556,
"grad_norm": 0.409016489982605,
"learning_rate": 0.00017085941143413496,
"loss": 0.1345,
"step": 105
},
{
"epoch": 1.3707865168539326,
"grad_norm": 0.3255477547645569,
"learning_rate": 0.0001702625997761957,
"loss": 0.1621,
"step": 106
},
{
"epoch": 1.3836276083467094,
"grad_norm": 0.2995210587978363,
"learning_rate": 0.00016966080382336074,
"loss": 0.1752,
"step": 107
},
{
"epoch": 1.3964686998394864,
"grad_norm": 0.3129432797431946,
"learning_rate": 0.00016905406626588777,
"loss": 0.1763,
"step": 108
},
{
"epoch": 1.4093097913322632,
"grad_norm": 0.31171512603759766,
"learning_rate": 0.00016844243014458262,
"loss": 0.1328,
"step": 109
},
{
"epoch": 1.4221508828250402,
"grad_norm": 0.29522988200187683,
"learning_rate": 0.00016782593884774586,
"loss": 0.1394,
"step": 110
},
{
"epoch": 1.434991974317817,
"grad_norm": 0.31598663330078125,
"learning_rate": 0.0001672046361080949,
"loss": 0.1351,
"step": 111
},
{
"epoch": 1.447833065810594,
"grad_norm": 0.3824458718299866,
"learning_rate": 0.00016657856599966182,
"loss": 0.1334,
"step": 112
},
{
"epoch": 1.4606741573033708,
"grad_norm": 0.39797738194465637,
"learning_rate": 0.0001659477729346667,
"loss": 0.1208,
"step": 113
},
{
"epoch": 1.4735152487961476,
"grad_norm": 0.4944222867488861,
"learning_rate": 0.0001653123016603672,
"loss": 0.1392,
"step": 114
},
{
"epoch": 1.4863563402889246,
"grad_norm": 0.30534660816192627,
"learning_rate": 0.0001646721972558842,
"loss": 0.1243,
"step": 115
},
{
"epoch": 1.4991974317817014,
"grad_norm": 0.33155277371406555,
"learning_rate": 0.00016402750512900397,
"loss": 0.1633,
"step": 116
},
{
"epoch": 1.5120385232744784,
"grad_norm": 0.343932569026947,
"learning_rate": 0.0001633782710129571,
"loss": 0.1701,
"step": 117
},
{
"epoch": 1.5248796147672552,
"grad_norm": 0.27937018871307373,
"learning_rate": 0.00016272454096317432,
"loss": 0.1549,
"step": 118
},
{
"epoch": 1.537720706260032,
"grad_norm": 0.35195839405059814,
"learning_rate": 0.00016206636135401913,
"loss": 0.1805,
"step": 119
},
{
"epoch": 1.550561797752809,
"grad_norm": 0.3309479057788849,
"learning_rate": 0.00016140377887549843,
"loss": 0.138,
"step": 120
},
{
"epoch": 1.563402889245586,
"grad_norm": 0.2956474721431732,
"learning_rate": 0.0001607368405299503,
"loss": 0.1484,
"step": 121
},
{
"epoch": 1.5762439807383628,
"grad_norm": 0.300729364156723,
"learning_rate": 0.00016006559362870964,
"loss": 0.1577,
"step": 122
},
{
"epoch": 1.5890850722311396,
"grad_norm": 0.35056933760643005,
"learning_rate": 0.00015939008578875214,
"loss": 0.1341,
"step": 123
},
{
"epoch": 1.6019261637239164,
"grad_norm": 0.3127368688583374,
"learning_rate": 0.00015871036492931632,
"loss": 0.1245,
"step": 124
},
{
"epoch": 1.6147672552166934,
"grad_norm": 0.31962379813194275,
"learning_rate": 0.00015802647926850424,
"loss": 0.1269,
"step": 125
},
{
"epoch": 1.6276083467094704,
"grad_norm": 0.30289211869239807,
"learning_rate": 0.00015733847731986113,
"loss": 0.1189,
"step": 126
},
{
"epoch": 1.6404494382022472,
"grad_norm": 0.42945238947868347,
"learning_rate": 0.00015664640788893376,
"loss": 0.1386,
"step": 127
},
{
"epoch": 1.653290529695024,
"grad_norm": 0.2242669314146042,
"learning_rate": 0.0001559503200698084,
"loss": 0.1136,
"step": 128
},
{
"epoch": 1.666131621187801,
"grad_norm": 0.382952481508255,
"learning_rate": 0.00015525026324162805,
"loss": 0.1661,
"step": 129
},
{
"epoch": 1.6789727126805778,
"grad_norm": 0.31802457571029663,
"learning_rate": 0.00015454628706508962,
"loss": 0.1715,
"step": 130
},
{
"epoch": 1.6918138041733548,
"grad_norm": 0.31699663400650024,
"learning_rate": 0.00015383844147892126,
"loss": 0.1454,
"step": 131
},
{
"epoch": 1.7046548956661316,
"grad_norm": 0.3550918400287628,
"learning_rate": 0.00015312677669633952,
"loss": 0.1454,
"step": 132
},
{
"epoch": 1.7174959871589084,
"grad_norm": 0.41878822445869446,
"learning_rate": 0.00015241134320148752,
"loss": 0.1565,
"step": 133
},
{
"epoch": 1.7303370786516854,
"grad_norm": 0.2940831780433655,
"learning_rate": 0.00015169219174585372,
"loss": 0.1405,
"step": 134
},
{
"epoch": 1.7431781701444624,
"grad_norm": 0.40646418929100037,
"learning_rate": 0.00015096937334467151,
"loss": 0.148,
"step": 135
},
{
"epoch": 1.7560192616372392,
"grad_norm": 0.296655535697937,
"learning_rate": 0.00015024293927330047,
"loss": 0.1325,
"step": 136
},
{
"epoch": 1.768860353130016,
"grad_norm": 0.2861349582672119,
"learning_rate": 0.00014951294106358887,
"loss": 0.1137,
"step": 137
},
{
"epoch": 1.7817014446227928,
"grad_norm": 0.35022491216659546,
"learning_rate": 0.00014877943050021827,
"loss": 0.1437,
"step": 138
},
{
"epoch": 1.7945425361155698,
"grad_norm": 0.39922255277633667,
"learning_rate": 0.0001480424596170298,
"loss": 0.1271,
"step": 139
},
{
"epoch": 1.8073836276083468,
"grad_norm": 0.21837952733039856,
"learning_rate": 0.00014730208069333313,
"loss": 0.1044,
"step": 140
},
{
"epoch": 1.8202247191011236,
"grad_norm": 0.28346794843673706,
"learning_rate": 0.00014655834625019787,
"loss": 0.1535,
"step": 141
},
{
"epoch": 1.8330658105939004,
"grad_norm": 0.3031107783317566,
"learning_rate": 0.00014581130904672764,
"loss": 0.1509,
"step": 142
},
{
"epoch": 1.8459069020866774,
"grad_norm": 0.288959801197052,
"learning_rate": 0.00014506102207631773,
"loss": 0.1565,
"step": 143
},
{
"epoch": 1.8587479935794544,
"grad_norm": 0.26415252685546875,
"learning_rate": 0.00014430753856289565,
"loss": 0.1542,
"step": 144
},
{
"epoch": 1.8715890850722312,
"grad_norm": 0.29712775349617004,
"learning_rate": 0.0001435509119571456,
"loss": 0.1442,
"step": 145
},
{
"epoch": 1.884430176565008,
"grad_norm": 0.3248273730278015,
"learning_rate": 0.00014279119593271666,
"loss": 0.142,
"step": 146
},
{
"epoch": 1.8972712680577848,
"grad_norm": 0.34400874376296997,
"learning_rate": 0.00014202844438241546,
"loss": 0.1495,
"step": 147
},
{
"epoch": 1.9101123595505618,
"grad_norm": 0.6406733989715576,
"learning_rate": 0.00014126271141438315,
"loss": 0.1178,
"step": 148
},
{
"epoch": 1.9229534510433388,
"grad_norm": 0.26794880628585815,
"learning_rate": 0.00014049405134825677,
"loss": 0.1219,
"step": 149
},
{
"epoch": 1.9357945425361156,
"grad_norm": 0.35011592507362366,
"learning_rate": 0.00013972251871131625,
"loss": 0.1417,
"step": 150
},
{
"epoch": 1.9486356340288924,
"grad_norm": 0.4136745035648346,
"learning_rate": 0.0001389481682346162,
"loss": 0.1319,
"step": 151
},
{
"epoch": 1.9614767255216692,
"grad_norm": 0.4271896779537201,
"learning_rate": 0.00013817105484910334,
"loss": 0.1226,
"step": 152
},
{
"epoch": 1.9743178170144462,
"grad_norm": 0.26580625772476196,
"learning_rate": 0.00013739123368171994,
"loss": 0.1477,
"step": 153
},
{
"epoch": 1.9871589085072232,
"grad_norm": 0.36320167779922485,
"learning_rate": 0.00013660876005149318,
"loss": 0.1446,
"step": 154
},
{
"epoch": 2.009630818619583,
"grad_norm": 0.556969404220581,
"learning_rate": 0.00013582368946561083,
"loss": 0.2174,
"step": 155
},
{
"epoch": 2.0224719101123596,
"grad_norm": 0.29500341415405273,
"learning_rate": 0.00013503607761548384,
"loss": 0.1428,
"step": 156
},
{
"epoch": 2.0353130016051364,
"grad_norm": 0.3006628155708313,
"learning_rate": 0.00013424598037279544,
"loss": 0.1346,
"step": 157
},
{
"epoch": 2.048154093097913,
"grad_norm": 0.3429463803768158,
"learning_rate": 0.00013345345378553805,
"loss": 0.1418,
"step": 158
},
{
"epoch": 2.0609951845906904,
"grad_norm": 0.31199735403060913,
"learning_rate": 0.000132658554074037,
"loss": 0.1335,
"step": 159
},
{
"epoch": 2.073836276083467,
"grad_norm": 0.35404714941978455,
"learning_rate": 0.00013186133762696267,
"loss": 0.1295,
"step": 160
},
{
"epoch": 2.086677367576244,
"grad_norm": 0.2951603829860687,
"learning_rate": 0.00013106186099733018,
"loss": 0.1309,
"step": 161
},
{
"epoch": 2.099518459069021,
"grad_norm": 0.32650235295295715,
"learning_rate": 0.0001302601808984877,
"loss": 0.1414,
"step": 162
},
{
"epoch": 2.1123595505617976,
"grad_norm": 0.5657181143760681,
"learning_rate": 0.0001294563542000933,
"loss": 0.1141,
"step": 163
},
{
"epoch": 2.125200642054575,
"grad_norm": 0.31900152564048767,
"learning_rate": 0.0001286504379240807,
"loss": 0.1105,
"step": 164
},
{
"epoch": 2.1380417335473516,
"grad_norm": 0.3072754442691803,
"learning_rate": 0.0001278424892406143,
"loss": 0.118,
"step": 165
},
{
"epoch": 2.1508828250401284,
"grad_norm": 0.36620378494262695,
"learning_rate": 0.00012703256546403374,
"loss": 0.1147,
"step": 166
},
{
"epoch": 2.163723916532905,
"grad_norm": 0.35902220010757446,
"learning_rate": 0.00012622072404878774,
"loss": 0.0722,
"step": 167
},
{
"epoch": 2.176565008025682,
"grad_norm": 0.3162679970264435,
"learning_rate": 0.0001254070225853589,
"loss": 0.1503,
"step": 168
},
{
"epoch": 2.189406099518459,
"grad_norm": 0.2725870907306671,
"learning_rate": 0.00012459151879617785,
"loss": 0.1488,
"step": 169
},
{
"epoch": 2.202247191011236,
"grad_norm": 0.28934144973754883,
"learning_rate": 0.00012377427053152903,
"loss": 0.1199,
"step": 170
},
{
"epoch": 2.215088282504013,
"grad_norm": 0.23648247122764587,
"learning_rate": 0.00012295533576544648,
"loss": 0.1137,
"step": 171
},
{
"epoch": 2.2279293739967896,
"grad_norm": 0.3106015622615814,
"learning_rate": 0.00012213477259160146,
"loss": 0.1433,
"step": 172
},
{
"epoch": 2.240770465489567,
"grad_norm": 0.25510647892951965,
"learning_rate": 0.00012131263921918143,
"loss": 0.1107,
"step": 173
},
{
"epoch": 2.2536115569823436,
"grad_norm": 0.33392271399497986,
"learning_rate": 0.00012048899396876065,
"loss": 0.1245,
"step": 174
},
{
"epoch": 2.2664526484751204,
"grad_norm": 0.2912399172782898,
"learning_rate": 0.00011966389526816322,
"loss": 0.1124,
"step": 175
},
{
"epoch": 2.279293739967897,
"grad_norm": 0.30920150876045227,
"learning_rate": 0.00011883740164831818,
"loss": 0.0881,
"step": 176
},
{
"epoch": 2.292134831460674,
"grad_norm": 0.3264816701412201,
"learning_rate": 0.00011800957173910748,
"loss": 0.1108,
"step": 177
},
{
"epoch": 2.304975922953451,
"grad_norm": 0.33401617407798767,
"learning_rate": 0.00011718046426520689,
"loss": 0.1088,
"step": 178
},
{
"epoch": 2.317817014446228,
"grad_norm": 0.3508036434650421,
"learning_rate": 0.00011635013804192015,
"loss": 0.1157,
"step": 179
},
{
"epoch": 2.330658105939005,
"grad_norm": 0.31395018100738525,
"learning_rate": 0.00011551865197100686,
"loss": 0.1019,
"step": 180
},
{
"epoch": 2.3434991974317816,
"grad_norm": 0.27874499559402466,
"learning_rate": 0.00011468606503650394,
"loss": 0.1243,
"step": 181
},
{
"epoch": 2.3563402889245584,
"grad_norm": 0.24277067184448242,
"learning_rate": 0.00011385243630054144,
"loss": 0.1447,
"step": 182
},
{
"epoch": 2.3691813804173356,
"grad_norm": 0.2734696567058563,
"learning_rate": 0.00011301782489915287,
"loss": 0.1438,
"step": 183
},
{
"epoch": 2.3820224719101124,
"grad_norm": 0.2731485068798065,
"learning_rate": 0.00011218229003808012,
"loss": 0.1431,
"step": 184
},
{
"epoch": 2.394863563402889,
"grad_norm": 0.47293299436569214,
"learning_rate": 0.00011134589098857356,
"loss": 0.1287,
"step": 185
},
{
"epoch": 2.407704654895666,
"grad_norm": 0.3162679076194763,
"learning_rate": 0.00011050868708318747,
"loss": 0.1217,
"step": 186
},
{
"epoch": 2.420545746388443,
"grad_norm": 0.39970093965530396,
"learning_rate": 0.00010967073771157099,
"loss": 0.1352,
"step": 187
},
{
"epoch": 2.43338683788122,
"grad_norm": 0.2743336856365204,
"learning_rate": 0.00010883210231625534,
"loss": 0.0977,
"step": 188
},
{
"epoch": 2.446227929373997,
"grad_norm": 0.37246617674827576,
"learning_rate": 0.00010799284038843686,
"loss": 0.1047,
"step": 189
},
{
"epoch": 2.4590690208667736,
"grad_norm": 0.3695964813232422,
"learning_rate": 0.00010715301146375694,
"loss": 0.1127,
"step": 190
},
{
"epoch": 2.4719101123595504,
"grad_norm": 0.35048359632492065,
"learning_rate": 0.00010631267511807861,
"loss": 0.1153,
"step": 191
},
{
"epoch": 2.4847512038523276,
"grad_norm": 0.20369504392147064,
"learning_rate": 0.0001054718909632604,
"loss": 0.0828,
"step": 192
},
{
"epoch": 2.4975922953451044,
"grad_norm": 0.3476305902004242,
"learning_rate": 0.00010463071864292764,
"loss": 0.1384,
"step": 193
},
{
"epoch": 2.510433386837881,
"grad_norm": 0.2885417640209198,
"learning_rate": 0.00010378921782824128,
"loss": 0.1496,
"step": 194
},
{
"epoch": 2.523274478330658,
"grad_norm": 0.2671290338039398,
"learning_rate": 0.00010294744821366504,
"loss": 0.14,
"step": 195
},
{
"epoch": 2.5361155698234352,
"grad_norm": 0.30776816606521606,
"learning_rate": 0.0001021054695127309,
"loss": 0.1442,
"step": 196
},
{
"epoch": 2.548956661316212,
"grad_norm": 0.2914685904979706,
"learning_rate": 0.00010126334145380288,
"loss": 0.1321,
"step": 197
},
{
"epoch": 2.561797752808989,
"grad_norm": 0.23030787706375122,
"learning_rate": 0.00010042112377584028,
"loss": 0.1177,
"step": 198
},
{
"epoch": 2.5746388443017656,
"grad_norm": 0.2737199068069458,
"learning_rate": 9.957887622415975e-05,
"loss": 0.1198,
"step": 199
},
{
"epoch": 2.5874799357945424,
"grad_norm": 0.26640573143959045,
"learning_rate": 9.873665854619715e-05,
"loss": 0.1084,
"step": 200
},
{
"epoch": 2.600321027287319,
"grad_norm": 0.3201727569103241,
"learning_rate": 9.789453048726912e-05,
"loss": 0.1022,
"step": 201
},
{
"epoch": 2.6131621187800964,
"grad_norm": 0.262690007686615,
"learning_rate": 9.705255178633497e-05,
"loss": 0.094,
"step": 202
},
{
"epoch": 2.626003210272873,
"grad_norm": 0.43916091322898865,
"learning_rate": 9.621078217175876e-05,
"loss": 0.1059,
"step": 203
},
{
"epoch": 2.63884430176565,
"grad_norm": 0.3432372212409973,
"learning_rate": 9.53692813570724e-05,
"loss": 0.1142,
"step": 204
},
{
"epoch": 2.6516853932584272,
"grad_norm": 0.23425506055355072,
"learning_rate": 9.452810903673963e-05,
"loss": 0.1118,
"step": 205
},
{
"epoch": 2.664526484751204,
"grad_norm": 0.2430526316165924,
"learning_rate": 9.368732488192143e-05,
"loss": 0.0987,
"step": 206
},
{
"epoch": 2.677367576243981,
"grad_norm": 0.28097087144851685,
"learning_rate": 9.28469885362431e-05,
"loss": 0.1198,
"step": 207
},
{
"epoch": 2.6902086677367576,
"grad_norm": 0.2345268875360489,
"learning_rate": 9.200715961156317e-05,
"loss": 0.1115,
"step": 208
},
{
"epoch": 2.7030497592295344,
"grad_norm": 0.4230293035507202,
"learning_rate": 9.116789768374467e-05,
"loss": 0.1497,
"step": 209
},
{
"epoch": 2.715890850722311,
"grad_norm": 0.2744080424308777,
"learning_rate": 9.032926228842902e-05,
"loss": 0.1296,
"step": 210
},
{
"epoch": 2.7287319422150884,
"grad_norm": 0.3323458135128021,
"learning_rate": 8.949131291681257e-05,
"loss": 0.143,
"step": 211
},
{
"epoch": 2.741573033707865,
"grad_norm": 0.2832281291484833,
"learning_rate": 8.865410901142645e-05,
"loss": 0.1154,
"step": 212
},
{
"epoch": 2.754414125200642,
"grad_norm": 0.3109094798564911,
"learning_rate": 8.781770996191992e-05,
"loss": 0.1146,
"step": 213
},
{
"epoch": 2.767255216693419,
"grad_norm": 0.27764928340911865,
"learning_rate": 8.698217510084717e-05,
"loss": 0.0992,
"step": 214
},
{
"epoch": 2.780096308186196,
"grad_norm": 0.3414210379123688,
"learning_rate": 8.614756369945856e-05,
"loss": 0.0996,
"step": 215
},
{
"epoch": 2.792937399678973,
"grad_norm": 0.276713490486145,
"learning_rate": 8.531393496349606e-05,
"loss": 0.0948,
"step": 216
},
{
"epoch": 2.8057784911717496,
"grad_norm": 0.24623610079288483,
"learning_rate": 8.448134802899314e-05,
"loss": 0.0819,
"step": 217
},
{
"epoch": 2.8186195826645264,
"grad_norm": 0.21359184384346008,
"learning_rate": 8.364986195807986e-05,
"loss": 0.1337,
"step": 218
},
{
"epoch": 2.831460674157303,
"grad_norm": 0.28975728154182434,
"learning_rate": 8.281953573479315e-05,
"loss": 0.1421,
"step": 219
},
{
"epoch": 2.8443017656500804,
"grad_norm": 0.25927087664604187,
"learning_rate": 8.199042826089252e-05,
"loss": 0.1162,
"step": 220
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.2766461968421936,
"learning_rate": 8.116259835168183e-05,
"loss": 0.1,
"step": 221
},
{
"epoch": 2.869983948635634,
"grad_norm": 0.28379619121551514,
"learning_rate": 8.033610473183678e-05,
"loss": 0.1001,
"step": 222
},
{
"epoch": 2.882825040128411,
"grad_norm": 0.2919027805328369,
"learning_rate": 7.951100603123936e-05,
"loss": 0.1081,
"step": 223
},
{
"epoch": 2.895666131621188,
"grad_norm": 0.26831895112991333,
"learning_rate": 7.868736078081859e-05,
"loss": 0.1092,
"step": 224
},
{
"epoch": 2.908507223113965,
"grad_norm": 0.2470114827156067,
"learning_rate": 7.786522740839852e-05,
"loss": 0.1069,
"step": 225
},
{
"epoch": 2.9213483146067416,
"grad_norm": 0.27561888098716736,
"learning_rate": 7.704466423455353e-05,
"loss": 0.0981,
"step": 226
},
{
"epoch": 2.9341894060995184,
"grad_norm": 0.26214268803596497,
"learning_rate": 7.622572946847098e-05,
"loss": 0.0933,
"step": 227
},
{
"epoch": 2.947030497592295,
"grad_norm": 0.3250983655452728,
"learning_rate": 7.540848120382214e-05,
"loss": 0.1005,
"step": 228
},
{
"epoch": 2.959871589085072,
"grad_norm": 0.3818821609020233,
"learning_rate": 7.459297741464115e-05,
"loss": 0.0977,
"step": 229
},
{
"epoch": 2.972712680577849,
"grad_norm": 0.1819436401128769,
"learning_rate": 7.37792759512123e-05,
"loss": 0.1218,
"step": 230
},
{
"epoch": 2.985553772070626,
"grad_norm": 0.28734883666038513,
"learning_rate": 7.296743453596631e-05,
"loss": 0.1154,
"step": 231
},
{
"epoch": 3.0080256821829856,
"grad_norm": 0.5932813882827759,
"learning_rate": 7.215751075938571e-05,
"loss": 0.2066,
"step": 232
},
{
"epoch": 3.0208667736757624,
"grad_norm": 0.19985750317573547,
"learning_rate": 7.134956207591936e-05,
"loss": 0.1257,
"step": 233
},
{
"epoch": 3.033707865168539,
"grad_norm": 0.2326143980026245,
"learning_rate": 7.054364579990677e-05,
"loss": 0.1019,
"step": 234
},
{
"epoch": 3.0465489566613164,
"grad_norm": 0.24029302597045898,
"learning_rate": 6.973981910151234e-05,
"loss": 0.1013,
"step": 235
},
{
"epoch": 3.059390048154093,
"grad_norm": 0.19977405667304993,
"learning_rate": 6.893813900266987e-05,
"loss": 0.087,
"step": 236
},
{
"epoch": 3.07223113964687,
"grad_norm": 0.2730342447757721,
"learning_rate": 6.813866237303735e-05,
"loss": 0.0991,
"step": 237
},
{
"epoch": 3.085072231139647,
"grad_norm": 0.2400568425655365,
"learning_rate": 6.734144592596303e-05,
"loss": 0.1135,
"step": 238
},
{
"epoch": 3.0979133226324236,
"grad_norm": 0.22795532643795013,
"learning_rate": 6.654654621446202e-05,
"loss": 0.1163,
"step": 239
},
{
"epoch": 3.110754414125201,
"grad_norm": 0.2850149869918823,
"learning_rate": 6.57540196272046e-05,
"loss": 0.1007,
"step": 240
},
{
"epoch": 3.1235955056179776,
"grad_norm": 0.2573602497577667,
"learning_rate": 6.496392238451621e-05,
"loss": 0.0706,
"step": 241
},
{
"epoch": 3.1364365971107544,
"grad_norm": 0.27073922753334045,
"learning_rate": 6.417631053438917e-05,
"loss": 0.0842,
"step": 242
},
{
"epoch": 3.149277688603531,
"grad_norm": 0.24778737127780914,
"learning_rate": 6.339123994850684e-05,
"loss": 0.0822,
"step": 243
},
{
"epoch": 3.162118780096308,
"grad_norm": 0.24561458826065063,
"learning_rate": 6.26087663182801e-05,
"loss": 0.0441,
"step": 244
},
{
"epoch": 3.174959871589085,
"grad_norm": 0.27637338638305664,
"learning_rate": 6.182894515089672e-05,
"loss": 0.1313,
"step": 245
},
{
"epoch": 3.187800963081862,
"grad_norm": 0.2668153941631317,
"learning_rate": 6.105183176538385e-05,
"loss": 0.1261,
"step": 246
},
{
"epoch": 3.200642054574639,
"grad_norm": 0.2336243838071823,
"learning_rate": 6.0277481288683765e-05,
"loss": 0.0968,
"step": 247
},
{
"epoch": 3.2134831460674156,
"grad_norm": 0.2731676995754242,
"learning_rate": 5.950594865174325e-05,
"loss": 0.1,
"step": 248
},
{
"epoch": 3.226324237560193,
"grad_norm": 0.19752222299575806,
"learning_rate": 5.8737288585616867e-05,
"loss": 0.1018,
"step": 249
},
{
"epoch": 3.2391653290529696,
"grad_norm": 0.274467796087265,
"learning_rate": 5.797155561758454e-05,
"loss": 0.1193,
"step": 250
},
{
"epoch": 3.2520064205457464,
"grad_norm": 0.2718445062637329,
"learning_rate": 5.7208804067283396e-05,
"loss": 0.0881,
"step": 251
},
{
"epoch": 3.264847512038523,
"grad_norm": 0.2557855248451233,
"learning_rate": 5.644908804285447e-05,
"loss": 0.0833,
"step": 252
},
{
"epoch": 3.2776886035313,
"grad_norm": 0.2935195863246918,
"learning_rate": 5.5692461437104335e-05,
"loss": 0.0881,
"step": 253
},
{
"epoch": 3.290529695024077,
"grad_norm": 0.2287595272064209,
"learning_rate": 5.493897792368228e-05,
"loss": 0.0688,
"step": 254
},
{
"epoch": 3.303370786516854,
"grad_norm": 0.2637540102005005,
"learning_rate": 5.418869095327237e-05,
"loss": 0.0665,
"step": 255
},
{
"epoch": 3.316211878009631,
"grad_norm": 0.3496418595314026,
"learning_rate": 5.344165374980217e-05,
"loss": 0.0901,
"step": 256
},
{
"epoch": 3.3290529695024076,
"grad_norm": 0.1895870864391327,
"learning_rate": 5.269791930666688e-05,
"loss": 0.0885,
"step": 257
},
{
"epoch": 3.341894060995185,
"grad_norm": 0.2253326177597046,
"learning_rate": 5.195754038297023e-05,
"loss": 0.1087,
"step": 258
},
{
"epoch": 3.3547351524879616,
"grad_norm": 0.18178820610046387,
"learning_rate": 5.122056949978176e-05,
"loss": 0.1029,
"step": 259
},
{
"epoch": 3.3675762439807384,
"grad_norm": 0.26113295555114746,
"learning_rate": 5.0487058936411144e-05,
"loss": 0.1091,
"step": 260
},
{
"epoch": 3.380417335473515,
"grad_norm": 0.2430870234966278,
"learning_rate": 4.975706072669958e-05,
"loss": 0.1085,
"step": 261
},
{
"epoch": 3.393258426966292,
"grad_norm": 0.2758089005947113,
"learning_rate": 4.9030626655328516e-05,
"loss": 0.0981,
"step": 262
},
{
"epoch": 3.4060995184590688,
"grad_norm": 0.26935628056526184,
"learning_rate": 4.8307808254146266e-05,
"loss": 0.0873,
"step": 263
},
{
"epoch": 3.418940609951846,
"grad_norm": 0.2657538652420044,
"learning_rate": 4.758865679851247e-05,
"loss": 0.0843,
"step": 264
},
{
"epoch": 3.431781701444623,
"grad_norm": 0.24748681485652924,
"learning_rate": 4.68732233036605e-05,
"loss": 0.0798,
"step": 265
},
{
"epoch": 3.4446227929373996,
"grad_norm": 0.456748366355896,
"learning_rate": 4.616155852107877e-05,
"loss": 0.0851,
"step": 266
},
{
"epoch": 3.4574638844301764,
"grad_norm": 0.23406369984149933,
"learning_rate": 4.54537129349104e-05,
"loss": 0.0831,
"step": 267
},
{
"epoch": 3.4703049759229536,
"grad_norm": 0.24098779261112213,
"learning_rate": 4.4749736758372e-05,
"loss": 0.081,
"step": 268
},
{
"epoch": 3.4831460674157304,
"grad_norm": 0.13690468668937683,
"learning_rate": 4.404967993019162e-05,
"loss": 0.0439,
"step": 269
},
{
"epoch": 3.495987158908507,
"grad_norm": 0.1986464560031891,
"learning_rate": 4.335359211106624e-05,
"loss": 0.1084,
"step": 270
},
{
"epoch": 3.508828250401284,
"grad_norm": 0.2006080001592636,
"learning_rate": 4.26615226801389e-05,
"loss": 0.1235,
"step": 271
},
{
"epoch": 3.521669341894061,
"grad_norm": 0.2104363888502121,
"learning_rate": 4.1973520731495765e-05,
"loss": 0.1134,
"step": 272
},
{
"epoch": 3.534510433386838,
"grad_norm": 0.24415647983551025,
"learning_rate": 4.128963507068371e-05,
"loss": 0.1146,
"step": 273
},
{
"epoch": 3.547351524879615,
"grad_norm": 0.2873344421386719,
"learning_rate": 4.0609914211247866e-05,
"loss": 0.0897,
"step": 274
},
{
"epoch": 3.5601926163723916,
"grad_norm": 0.19503550231456757,
"learning_rate": 3.993440637129036e-05,
"loss": 0.0896,
"step": 275
},
{
"epoch": 3.5730337078651684,
"grad_norm": 0.27025559544563293,
"learning_rate": 3.926315947004971e-05,
"loss": 0.0979,
"step": 276
},
{
"epoch": 3.5858747993579456,
"grad_norm": 0.3529897630214691,
"learning_rate": 3.859622112450158e-05,
"loss": 0.0784,
"step": 277
},
{
"epoch": 3.5987158908507224,
"grad_norm": 0.31557080149650574,
"learning_rate": 3.7933638645980905e-05,
"loss": 0.0921,
"step": 278
},
{
"epoch": 3.611556982343499,
"grad_norm": 0.25294047594070435,
"learning_rate": 3.727545903682572e-05,
"loss": 0.086,
"step": 279
},
{
"epoch": 3.624398073836276,
"grad_norm": 0.20190520584583282,
"learning_rate": 3.6621728987042905e-05,
"loss": 0.0605,
"step": 280
},
{
"epoch": 3.637239165329053,
"grad_norm": 0.2746667265892029,
"learning_rate": 3.5972494870996063e-05,
"loss": 0.077,
"step": 281
},
{
"epoch": 3.65008025682183,
"grad_norm": 0.23425573110580444,
"learning_rate": 3.532780274411581e-05,
"loss": 0.0991,
"step": 282
},
{
"epoch": 3.662921348314607,
"grad_norm": 0.15118825435638428,
"learning_rate": 3.468769833963279e-05,
"loss": 0.1051,
"step": 283
},
{
"epoch": 3.6757624398073836,
"grad_norm": 0.23835481703281403,
"learning_rate": 3.40522270653333e-05,
"loss": 0.0939,
"step": 284
},
{
"epoch": 3.6886035313001604,
"grad_norm": 0.17009741067886353,
"learning_rate": 3.3421434000338194e-05,
"loss": 0.085,
"step": 285
},
{
"epoch": 3.7014446227929376,
"grad_norm": 0.23825782537460327,
"learning_rate": 3.279536389190512e-05,
"loss": 0.0982,
"step": 286
},
{
"epoch": 3.7142857142857144,
"grad_norm": 0.21432097256183624,
"learning_rate": 3.2174061152254166e-05,
"loss": 0.079,
"step": 287
},
{
"epoch": 3.727126805778491,
"grad_norm": 0.21313104033470154,
"learning_rate": 3.1557569855417394e-05,
"loss": 0.0852,
"step": 288
},
{
"epoch": 3.739967897271268,
"grad_norm": 0.28463664650917053,
"learning_rate": 3.094593373411224e-05,
"loss": 0.0883,
"step": 289
},
{
"epoch": 3.752808988764045,
"grad_norm": 0.24580109119415283,
"learning_rate": 3.0339196176639283e-05,
"loss": 0.0771,
"step": 290
},
{
"epoch": 3.7656500802568216,
"grad_norm": 0.24096901714801788,
"learning_rate": 2.973740022380428e-05,
"loss": 0.0749,
"step": 291
},
{
"epoch": 3.778491171749599,
"grad_norm": 0.25022003054618835,
"learning_rate": 2.9140588565865077e-05,
"loss": 0.0748,
"step": 292
},
{
"epoch": 3.7913322632423756,
"grad_norm": 0.2789011597633362,
"learning_rate": 2.8548803539502987e-05,
"loss": 0.0717,
"step": 293
},
{
"epoch": 3.8041733547351524,
"grad_norm": 0.1361372172832489,
"learning_rate": 2.7962087124819757e-05,
"loss": 0.0431,
"step": 294
},
{
"epoch": 3.8170144462279296,
"grad_norm": 0.22512777149677277,
"learning_rate": 2.7380480942359477e-05,
"loss": 0.1221,
"step": 295
},
{
"epoch": 3.8298555377207064,
"grad_norm": 0.2254047840833664,
"learning_rate": 2.6804026250156077e-05,
"loss": 0.1062,
"step": 296
},
{
"epoch": 3.842696629213483,
"grad_norm": 0.27256065607070923,
"learning_rate": 2.6232763940806606e-05,
"loss": 0.1023,
"step": 297
},
{
"epoch": 3.85553772070626,
"grad_norm": 0.2014126181602478,
"learning_rate": 2.5666734538570337e-05,
"loss": 0.0999,
"step": 298
},
{
"epoch": 3.868378812199037,
"grad_norm": 0.27657970786094666,
"learning_rate": 2.5105978196494184e-05,
"loss": 0.0859,
"step": 299
},
{
"epoch": 3.8812199036918136,
"grad_norm": 0.287130743265152,
"learning_rate": 2.455053469356413e-05,
"loss": 0.0946,
"step": 300
},
{
"epoch": 3.894060995184591,
"grad_norm": 0.28707414865493774,
"learning_rate": 2.400044343188349e-05,
"loss": 0.0848,
"step": 301
},
{
"epoch": 3.9069020866773676,
"grad_norm": 0.20366519689559937,
"learning_rate": 2.3455743433877808e-05,
"loss": 0.0774,
"step": 302
},
{
"epoch": 3.9197431781701444,
"grad_norm": 0.2152596265077591,
"learning_rate": 2.2916473339526644e-05,
"loss": 0.0715,
"step": 303
},
{
"epoch": 3.932584269662921,
"grad_norm": 0.26020580530166626,
"learning_rate": 2.238267140362257e-05,
"loss": 0.0721,
"step": 304
},
{
"epoch": 3.9454253611556984,
"grad_norm": 0.3326077461242676,
"learning_rate": 2.1854375493057388e-05,
"loss": 0.0757,
"step": 305
},
{
"epoch": 3.958266452648475,
"grad_norm": 0.26648905873298645,
"learning_rate": 2.1331623084135976e-05,
"loss": 0.0832,
"step": 306
},
{
"epoch": 3.971107544141252,
"grad_norm": 0.21082034707069397,
"learning_rate": 2.0814451259917767e-05,
"loss": 0.0825,
"step": 307
},
{
"epoch": 3.983948635634029,
"grad_norm": 0.19309554994106293,
"learning_rate": 2.0302896707586127e-05,
"loss": 0.0906,
"step": 308
},
{
"epoch": 4.006420545746389,
"grad_norm": 0.35690435767173767,
"learning_rate": 1.9796995715845955e-05,
"loss": 0.1062,
"step": 309
},
{
"epoch": 4.019261637239166,
"grad_norm": 0.1724165827035904,
"learning_rate": 1.9296784172349223e-05,
"loss": 0.1117,
"step": 310
},
{
"epoch": 4.032102728731942,
"grad_norm": 0.17502835392951965,
"learning_rate": 1.8802297561149363e-05,
"loss": 0.1141,
"step": 311
},
{
"epoch": 4.044943820224719,
"grad_norm": 0.21144062280654907,
"learning_rate": 1.8313570960183977e-05,
"loss": 0.0925,
"step": 312
},
{
"epoch": 4.057784911717496,
"grad_norm": 0.25978124141693115,
"learning_rate": 1.7830639038786555e-05,
"loss": 0.0865,
"step": 313
},
{
"epoch": 4.070626003210273,
"grad_norm": 0.20121395587921143,
"learning_rate": 1.7353536055227047e-05,
"loss": 0.0658,
"step": 314
},
{
"epoch": 4.08346709470305,
"grad_norm": 0.3032616376876831,
"learning_rate": 1.688229585428167e-05,
"loss": 0.0741,
"step": 315
},
{
"epoch": 4.096308186195826,
"grad_norm": 0.19196993112564087,
"learning_rate": 1.6416951864832008e-05,
"loss": 0.069,
"step": 316
},
{
"epoch": 4.109149277688603,
"grad_norm": 0.2227143496274948,
"learning_rate": 1.5957537097493637e-05,
"loss": 0.064,
"step": 317
},
{
"epoch": 4.121990369181381,
"grad_norm": 0.2821972072124481,
"learning_rate": 1.5504084142274477e-05,
"loss": 0.0563,
"step": 318
},
{
"epoch": 4.134831460674158,
"grad_norm": 0.23713919520378113,
"learning_rate": 1.5056625166262772e-05,
"loss": 0.0657,
"step": 319
},
{
"epoch": 4.147672552166934,
"grad_norm": 0.18978498876094818,
"learning_rate": 1.4615191911345371e-05,
"loss": 0.0609,
"step": 320
},
{
"epoch": 4.160513643659711,
"grad_norm": 0.12965190410614014,
"learning_rate": 1.4179815691955923e-05,
"loss": 0.0341,
"step": 321
},
{
"epoch": 4.173354735152488,
"grad_norm": 0.1908600926399231,
"learning_rate": 1.3750527392853519e-05,
"loss": 0.0838,
"step": 322
},
{
"epoch": 4.186195826645265,
"grad_norm": 0.22569157183170319,
"learning_rate": 1.3327357466931811e-05,
"loss": 0.0833,
"step": 323
},
{
"epoch": 4.199036918138042,
"grad_norm": 0.20586968958377838,
"learning_rate": 1.2910335933058714e-05,
"loss": 0.0921,
"step": 324
},
{
"epoch": 4.211878009630818,
"grad_norm": 0.22846491634845734,
"learning_rate": 1.2499492373946919e-05,
"loss": 0.0935,
"step": 325
},
{
"epoch": 4.224719101123595,
"grad_norm": 0.2915958762168884,
"learning_rate": 1.2094855934055361e-05,
"loss": 0.0931,
"step": 326
},
{
"epoch": 4.237560192616373,
"grad_norm": 0.22078849375247955,
"learning_rate": 1.1696455317521826e-05,
"loss": 0.0873,
"step": 327
},
{
"epoch": 4.25040128410915,
"grad_norm": 0.2387520968914032,
"learning_rate": 1.1304318786126588e-05,
"loss": 0.0819,
"step": 328
},
{
"epoch": 4.263242375601926,
"grad_norm": 0.23125675320625305,
"learning_rate": 1.0918474157287728e-05,
"loss": 0.0713,
"step": 329
},
{
"epoch": 4.276083467094703,
"grad_norm": 0.25590938329696655,
"learning_rate": 1.0538948802087722e-05,
"loss": 0.0734,
"step": 330
},
{
"epoch": 4.28892455858748,
"grad_norm": 0.23935070633888245,
"learning_rate": 1.0165769643331836e-05,
"loss": 0.0677,
"step": 331
},
{
"epoch": 4.301765650080257,
"grad_norm": 0.18608956038951874,
"learning_rate": 9.79896315363823e-06,
"loss": 0.0541,
"step": 332
},
{
"epoch": 4.314606741573034,
"grad_norm": 0.3140433132648468,
"learning_rate": 9.438555353560107e-06,
"loss": 0.0586,
"step": 333
},
{
"epoch": 4.32744783306581,
"grad_norm": 0.31043872237205505,
"learning_rate": 9.084571809739806e-06,
"loss": 0.0726,
"step": 334
},
{
"epoch": 4.340288924558587,
"grad_norm": 0.19324089586734772,
"learning_rate": 8.737037633095168e-06,
"loss": 0.082,
"step": 335
},
{
"epoch": 4.353130016051364,
"grad_norm": 0.2725972831249237,
"learning_rate": 8.395977477038198e-06,
"loss": 0.1139,
"step": 336
},
{
"epoch": 4.365971107544142,
"grad_norm": 0.16910400986671448,
"learning_rate": 8.061415535726303e-06,
"loss": 0.0728,
"step": 337
},
{
"epoch": 4.378812199036918,
"grad_norm": 0.22665587067604065,
"learning_rate": 7.733375542345832e-06,
"loss": 0.096,
"step": 338
},
{
"epoch": 4.391653290529695,
"grad_norm": 0.26520606875419617,
"learning_rate": 7.411880767428636e-06,
"loss": 0.0902,
"step": 339
},
{
"epoch": 4.404494382022472,
"grad_norm": 0.19831718504428864,
"learning_rate": 7.096954017201207e-06,
"loss": 0.0816,
"step": 340
},
{
"epoch": 4.417335473515249,
"grad_norm": 0.20937596261501312,
"learning_rate": 6.788617631966909e-06,
"loss": 0.065,
"step": 341
},
{
"epoch": 4.430176565008026,
"grad_norm": 0.284106582403183,
"learning_rate": 6.486893484521162e-06,
"loss": 0.0653,
"step": 342
},
{
"epoch": 4.443017656500802,
"grad_norm": 0.23955251276493073,
"learning_rate": 6.19180297859987e-06,
"loss": 0.0483,
"step": 343
},
{
"epoch": 4.455858747993579,
"grad_norm": 0.2952927052974701,
"learning_rate": 5.9033670473610085e-06,
"loss": 0.0719,
"step": 344
},
{
"epoch": 4.468699839486356,
"grad_norm": 0.2724378705024719,
"learning_rate": 5.621606151899716e-06,
"loss": 0.0544,
"step": 345
},
{
"epoch": 4.481540930979134,
"grad_norm": 0.17572782933712006,
"learning_rate": 5.346540279796886e-06,
"loss": 0.0446,
"step": 346
},
{
"epoch": 4.49438202247191,
"grad_norm": 0.22200757265090942,
"learning_rate": 5.078188943701123e-06,
"loss": 0.1018,
"step": 347
},
{
"epoch": 4.507223113964687,
"grad_norm": 0.2331586480140686,
"learning_rate": 4.816571179944706e-06,
"loss": 0.0991,
"step": 348
},
{
"epoch": 4.520064205457464,
"grad_norm": 0.18275317549705505,
"learning_rate": 4.561705547193096e-06,
"loss": 0.0768,
"step": 349
},
{
"epoch": 4.532905296950241,
"grad_norm": 0.1845731884241104,
"learning_rate": 4.313610125128464e-06,
"loss": 0.0922,
"step": 350
},
{
"epoch": 4.545746388443018,
"grad_norm": 0.2531459331512451,
"learning_rate": 4.072302513167148e-06,
"loss": 0.0731,
"step": 351
},
{
"epoch": 4.558587479935794,
"grad_norm": 0.17735856771469116,
"learning_rate": 3.837799829211164e-06,
"loss": 0.0664,
"step": 352
},
{
"epoch": 4.571428571428571,
"grad_norm": 0.1611146628856659,
"learning_rate": 3.610118708433907e-06,
"loss": 0.0681,
"step": 353
},
{
"epoch": 4.584269662921348,
"grad_norm": 0.2285057157278061,
"learning_rate": 3.389275302100081e-06,
"loss": 0.0685,
"step": 354
},
{
"epoch": 4.597110754414125,
"grad_norm": 0.21249911189079285,
"learning_rate": 3.1752852764199812e-06,
"loss": 0.0642,
"step": 355
},
{
"epoch": 4.609951845906902,
"grad_norm": 0.27474352717399597,
"learning_rate": 2.9681638114381184e-06,
"loss": 0.0629,
"step": 356
},
{
"epoch": 4.622792937399679,
"grad_norm": 0.23277732729911804,
"learning_rate": 2.767925599956411e-06,
"loss": 0.0533,
"step": 357
},
{
"epoch": 4.635634028892456,
"grad_norm": 0.34809577465057373,
"learning_rate": 2.57458484649189e-06,
"loss": 0.0653,
"step": 358
},
{
"epoch": 4.648475120385233,
"grad_norm": 0.19076129794120789,
"learning_rate": 2.3881552662690765e-06,
"loss": 0.0871,
"step": 359
},
{
"epoch": 4.66131621187801,
"grad_norm": 0.15909138321876526,
"learning_rate": 2.2086500842470214e-06,
"loss": 0.0811,
"step": 360
},
{
"epoch": 4.674157303370786,
"grad_norm": 0.1952279508113861,
"learning_rate": 2.0360820341811635e-06,
"loss": 0.0826,
"step": 361
},
{
"epoch": 4.686998394863563,
"grad_norm": 0.14884842932224274,
"learning_rate": 1.8704633577200204e-06,
"loss": 0.0582,
"step": 362
},
{
"epoch": 4.69983948635634,
"grad_norm": 0.2566874027252197,
"learning_rate": 1.7118058035368368e-06,
"loss": 0.0891,
"step": 363
},
{
"epoch": 4.712680577849117,
"grad_norm": 0.23755532503128052,
"learning_rate": 1.5601206264960644e-06,
"loss": 0.0904,
"step": 364
},
{
"epoch": 4.725521669341894,
"grad_norm": 0.2856099009513855,
"learning_rate": 1.4154185868550106e-06,
"loss": 0.0754,
"step": 365
},
{
"epoch": 4.738362760834671,
"grad_norm": 0.24481110274791718,
"learning_rate": 1.2777099495005495e-06,
"loss": 0.0777,
"step": 366
},
{
"epoch": 4.751203852327448,
"grad_norm": 0.2835271656513214,
"learning_rate": 1.147004483220926e-06,
"loss": 0.0688,
"step": 367
},
{
"epoch": 4.764044943820225,
"grad_norm": 0.26856350898742676,
"learning_rate": 1.0233114600127547e-06,
"loss": 0.0639,
"step": 368
},
{
"epoch": 4.776886035313002,
"grad_norm": 0.2596482038497925,
"learning_rate": 9.066396544233468e-07,
"loss": 0.0557,
"step": 369
},
{
"epoch": 4.789727126805778,
"grad_norm": 0.25499358773231506,
"learning_rate": 7.969973429281741e-07,
"loss": 0.0536,
"step": 370
},
{
"epoch": 4.802568218298555,
"grad_norm": 0.1461191475391388,
"learning_rate": 6.94392303343816e-07,
"loss": 0.0412,
"step": 371
},
{
"epoch": 4.815409309791332,
"grad_norm": 0.32968461513519287,
"learning_rate": 5.988318142762128e-07,
"loss": 0.1382,
"step": 372
},
{
"epoch": 4.828250401284109,
"grad_norm": 0.19888003170490265,
"learning_rate": 5.103226546043005e-07,
"loss": 0.0932,
"step": 373
},
{
"epoch": 4.841091492776886,
"grad_norm": 0.22171039879322052,
"learning_rate": 4.28871102999151e-07,
"loss": 0.0748,
"step": 374
},
{
"epoch": 4.853932584269663,
"grad_norm": 0.18117375671863556,
"learning_rate": 3.54482937478573e-07,
"loss": 0.0805,
"step": 375
},
{
"epoch": 4.86677367576244,
"grad_norm": 0.18704640865325928,
"learning_rate": 2.871634349972285e-07,
"loss": 0.0959,
"step": 376
},
{
"epoch": 4.879614767255217,
"grad_norm": 0.2359827756881714,
"learning_rate": 2.2691737107231048e-07,
"loss": 0.0779,
"step": 377
},
{
"epoch": 4.892455858747994,
"grad_norm": 0.19910109043121338,
"learning_rate": 1.7374901944474663e-07,
"loss": 0.0737,
"step": 378
},
{
"epoch": 4.90529695024077,
"grad_norm": 0.2202347368001938,
"learning_rate": 1.2766215177605345e-07,
"loss": 0.0775,
"step": 379
},
{
"epoch": 4.918138041733547,
"grad_norm": 0.2741416096687317,
"learning_rate": 8.866003738078332e-08,
"loss": 0.0816,
"step": 380
},
{
"epoch": 4.930979133226324,
"grad_norm": 0.19433164596557617,
"learning_rate": 5.674544299457685e-08,
"loss": 0.0703,
"step": 381
},
{
"epoch": 4.943820224719101,
"grad_norm": 0.24093866348266602,
"learning_rate": 3.192063257793088e-08,
"loss": 0.0588,
"step": 382
},
{
"epoch": 4.956661316211878,
"grad_norm": 0.3715647757053375,
"learning_rate": 1.4187367155582555e-08,
"loss": 0.0546,
"step": 383
},
{
"epoch": 4.969502407704655,
"grad_norm": 0.26011672616004944,
"learning_rate": 3.546904691564823e-09,
"loss": 0.0893,
"step": 384
},
{
"epoch": 4.982343499197432,
"grad_norm": 0.19426091015338898,
"learning_rate": 0.0,
"loss": 0.0736,
"step": 385
}
],
"logging_steps": 1,
"max_steps": 385,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.2073448843153244e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}