{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.3107520198881293,
"eval_steps": 500,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0015537600994406464,
"grad_norm": 1.6171875,
"learning_rate": 9.952500000000001e-05,
"loss": 1.8667953491210938,
"step": 20
},
{
"epoch": 0.003107520198881293,
"grad_norm": 1.46875,
"learning_rate": 9.9025e-05,
"loss": 1.4211945533752441,
"step": 40
},
{
"epoch": 0.0046612802983219395,
"grad_norm": 1.46875,
"learning_rate": 9.8525e-05,
"loss": 1.3616345405578614,
"step": 60
},
{
"epoch": 0.006215040397762586,
"grad_norm": 1.375,
"learning_rate": 9.8025e-05,
"loss": 1.3699079513549806,
"step": 80
},
{
"epoch": 0.007768800497203232,
"grad_norm": 1.265625,
"learning_rate": 9.7525e-05,
"loss": 1.326680564880371,
"step": 100
},
{
"epoch": 0.009322560596643879,
"grad_norm": 1.2421875,
"learning_rate": 9.7025e-05,
"loss": 1.3107091903686523,
"step": 120
},
{
"epoch": 0.010876320696084525,
"grad_norm": 1.2421875,
"learning_rate": 9.652500000000002e-05,
"loss": 1.3247576713562013,
"step": 140
},
{
"epoch": 0.012430080795525171,
"grad_norm": 1.234375,
"learning_rate": 9.6025e-05,
"loss": 1.306673526763916,
"step": 160
},
{
"epoch": 0.013983840894965818,
"grad_norm": 1.2265625,
"learning_rate": 9.5525e-05,
"loss": 1.2789793014526367,
"step": 180
},
{
"epoch": 0.015537600994406464,
"grad_norm": 1.1796875,
"learning_rate": 9.5025e-05,
"loss": 1.2699722290039062,
"step": 200
},
{
"epoch": 0.01709136109384711,
"grad_norm": 1.1796875,
"learning_rate": 9.452500000000001e-05,
"loss": 1.2735892295837403,
"step": 220
},
{
"epoch": 0.018645121193287758,
"grad_norm": 1.15625,
"learning_rate": 9.402500000000001e-05,
"loss": 1.255952739715576,
"step": 240
},
{
"epoch": 0.020198881292728402,
"grad_norm": 1.0859375,
"learning_rate": 9.352500000000001e-05,
"loss": 1.2663206100463866,
"step": 260
},
{
"epoch": 0.02175264139216905,
"grad_norm": 1.09375,
"learning_rate": 9.302500000000001e-05,
"loss": 1.2674468994140624,
"step": 280
},
{
"epoch": 0.023306401491609695,
"grad_norm": 1.0703125,
"learning_rate": 9.252500000000001e-05,
"loss": 1.2581491470336914,
"step": 300
},
{
"epoch": 0.024860161591050343,
"grad_norm": 1.15625,
"learning_rate": 9.2025e-05,
"loss": 1.2293106079101563,
"step": 320
},
{
"epoch": 0.026413921690490987,
"grad_norm": 1.0703125,
"learning_rate": 9.1525e-05,
"loss": 1.2330900192260743,
"step": 340
},
{
"epoch": 0.027967681789931635,
"grad_norm": 1.0859375,
"learning_rate": 9.1025e-05,
"loss": 1.2186731338500976,
"step": 360
},
{
"epoch": 0.02952144188937228,
"grad_norm": 1.078125,
"learning_rate": 9.0525e-05,
"loss": 1.2420782089233398,
"step": 380
},
{
"epoch": 0.031075201988812928,
"grad_norm": 0.984375,
"learning_rate": 9.0025e-05,
"loss": 1.2236801147460938,
"step": 400
},
{
"epoch": 0.032628962088253576,
"grad_norm": 1.0390625,
"learning_rate": 8.952500000000001e-05,
"loss": 1.2241475105285644,
"step": 420
},
{
"epoch": 0.03418272218769422,
"grad_norm": 0.98828125,
"learning_rate": 8.902500000000001e-05,
"loss": 1.2227598190307618,
"step": 440
},
{
"epoch": 0.035736482287134864,
"grad_norm": 1.0390625,
"learning_rate": 8.8525e-05,
"loss": 1.2154598236083984,
"step": 460
},
{
"epoch": 0.037290242386575516,
"grad_norm": 1.0859375,
"learning_rate": 8.8025e-05,
"loss": 1.20736665725708,
"step": 480
},
{
"epoch": 0.03884400248601616,
"grad_norm": 1.0546875,
"learning_rate": 8.7525e-05,
"loss": 1.1936758041381836,
"step": 500
},
{
"epoch": 0.040397762585456805,
"grad_norm": 1.09375,
"learning_rate": 8.7025e-05,
"loss": 1.1817484855651856,
"step": 520
},
{
"epoch": 0.04195152268489745,
"grad_norm": 1.0703125,
"learning_rate": 8.6525e-05,
"loss": 1.2060453414916992,
"step": 540
},
{
"epoch": 0.0435052827843381,
"grad_norm": 0.99609375,
"learning_rate": 8.6025e-05,
"loss": 1.2340307235717773,
"step": 560
},
{
"epoch": 0.045059042883778745,
"grad_norm": 1.0,
"learning_rate": 8.5525e-05,
"loss": 1.2149415969848634,
"step": 580
},
{
"epoch": 0.04661280298321939,
"grad_norm": 0.97265625,
"learning_rate": 8.502499999999999e-05,
"loss": 1.2079537391662598,
"step": 600
},
{
"epoch": 0.048166563082660034,
"grad_norm": 1.015625,
"learning_rate": 8.4525e-05,
"loss": 1.1671560287475586,
"step": 620
},
{
"epoch": 0.049720323182100686,
"grad_norm": 1.0,
"learning_rate": 8.402500000000001e-05,
"loss": 1.1967806816101074,
"step": 640
},
{
"epoch": 0.05127408328154133,
"grad_norm": 1.0234375,
"learning_rate": 8.352500000000001e-05,
"loss": 1.178822135925293,
"step": 660
},
{
"epoch": 0.052827843380981974,
"grad_norm": 1.0625,
"learning_rate": 8.302500000000001e-05,
"loss": 1.1784405708312988,
"step": 680
},
{
"epoch": 0.054381603480422626,
"grad_norm": 0.9765625,
"learning_rate": 8.252500000000001e-05,
"loss": 1.1611870765686034,
"step": 700
},
{
"epoch": 0.05593536357986327,
"grad_norm": 1.0078125,
"learning_rate": 8.2025e-05,
"loss": 1.1876888275146484,
"step": 720
},
{
"epoch": 0.057489123679303915,
"grad_norm": 0.94921875,
"learning_rate": 8.1525e-05,
"loss": 1.18981876373291,
"step": 740
},
{
"epoch": 0.05904288377874456,
"grad_norm": 0.93359375,
"learning_rate": 8.1025e-05,
"loss": 1.1651229858398438,
"step": 760
},
{
"epoch": 0.06059664387818521,
"grad_norm": 0.953125,
"learning_rate": 8.0525e-05,
"loss": 1.1725165367126464,
"step": 780
},
{
"epoch": 0.062150403977625855,
"grad_norm": 0.94921875,
"learning_rate": 8.002500000000001e-05,
"loss": 1.1620153427124023,
"step": 800
},
{
"epoch": 0.0637041640770665,
"grad_norm": 0.94921875,
"learning_rate": 7.952500000000001e-05,
"loss": 1.180180549621582,
"step": 820
},
{
"epoch": 0.06525792417650715,
"grad_norm": 0.953125,
"learning_rate": 7.902500000000001e-05,
"loss": 1.169425106048584,
"step": 840
},
{
"epoch": 0.06681168427594779,
"grad_norm": 0.96875,
"learning_rate": 7.8525e-05,
"loss": 1.1701436996459962,
"step": 860
},
{
"epoch": 0.06836544437538844,
"grad_norm": 0.98828125,
"learning_rate": 7.8025e-05,
"loss": 1.1514653205871581,
"step": 880
},
{
"epoch": 0.06991920447482909,
"grad_norm": 0.9453125,
"learning_rate": 7.7525e-05,
"loss": 1.166794776916504,
"step": 900
},
{
"epoch": 0.07147296457426973,
"grad_norm": 0.98828125,
"learning_rate": 7.7025e-05,
"loss": 1.1640316009521485,
"step": 920
},
{
"epoch": 0.07302672467371038,
"grad_norm": 1.0,
"learning_rate": 7.6525e-05,
"loss": 1.1756441116333007,
"step": 940
},
{
"epoch": 0.07458048477315103,
"grad_norm": 0.921875,
"learning_rate": 7.6025e-05,
"loss": 1.1775863647460938,
"step": 960
},
{
"epoch": 0.07613424487259167,
"grad_norm": 0.94140625,
"learning_rate": 7.5525e-05,
"loss": 1.1620708465576173,
"step": 980
},
{
"epoch": 0.07768800497203232,
"grad_norm": 0.93359375,
"learning_rate": 7.502500000000001e-05,
"loss": 1.1424921989440917,
"step": 1000
},
{
"epoch": 0.07924176507147296,
"grad_norm": 0.94140625,
"learning_rate": 7.4525e-05,
"loss": 1.1636884689331055,
"step": 1020
},
{
"epoch": 0.08079552517091361,
"grad_norm": 0.96875,
"learning_rate": 7.4025e-05,
"loss": 1.1640792846679688,
"step": 1040
},
{
"epoch": 0.08234928527035426,
"grad_norm": 0.96875,
"learning_rate": 7.3525e-05,
"loss": 1.1577949523925781,
"step": 1060
},
{
"epoch": 0.0839030453697949,
"grad_norm": 0.9375,
"learning_rate": 7.3025e-05,
"loss": 1.1563040733337402,
"step": 1080
},
{
"epoch": 0.08545680546923555,
"grad_norm": 0.9296875,
"learning_rate": 7.2525e-05,
"loss": 1.1314552307128907,
"step": 1100
},
{
"epoch": 0.0870105655686762,
"grad_norm": 0.953125,
"learning_rate": 7.2025e-05,
"loss": 1.1430686950683593,
"step": 1120
},
{
"epoch": 0.08856432566811684,
"grad_norm": 0.921875,
"learning_rate": 7.1525e-05,
"loss": 1.1624882698059082,
"step": 1140
},
{
"epoch": 0.09011808576755749,
"grad_norm": 0.9375,
"learning_rate": 7.1025e-05,
"loss": 1.1404618263244628,
"step": 1160
},
{
"epoch": 0.09167184586699814,
"grad_norm": 0.94921875,
"learning_rate": 7.0525e-05,
"loss": 1.142820453643799,
"step": 1180
},
{
"epoch": 0.09322560596643878,
"grad_norm": 0.9296875,
"learning_rate": 7.002500000000001e-05,
"loss": 1.1361354827880858,
"step": 1200
},
{
"epoch": 0.09477936606587943,
"grad_norm": 0.91796875,
"learning_rate": 6.952500000000001e-05,
"loss": 1.141007137298584,
"step": 1220
},
{
"epoch": 0.09633312616532007,
"grad_norm": 0.93359375,
"learning_rate": 6.902500000000001e-05,
"loss": 1.140983772277832,
"step": 1240
},
{
"epoch": 0.09788688626476072,
"grad_norm": 0.94921875,
"learning_rate": 6.852500000000001e-05,
"loss": 1.1491514205932618,
"step": 1260
},
{
"epoch": 0.09944064636420137,
"grad_norm": 0.953125,
"learning_rate": 6.8025e-05,
"loss": 1.1248952865600585,
"step": 1280
},
{
"epoch": 0.10099440646364201,
"grad_norm": 0.9296875,
"learning_rate": 6.7525e-05,
"loss": 1.1090205192565918,
"step": 1300
},
{
"epoch": 0.10254816656308266,
"grad_norm": 0.9765625,
"learning_rate": 6.7025e-05,
"loss": 1.1208727836608887,
"step": 1320
},
{
"epoch": 0.10410192666252331,
"grad_norm": 0.9375,
"learning_rate": 6.6525e-05,
"loss": 1.1511701583862304,
"step": 1340
},
{
"epoch": 0.10565568676196395,
"grad_norm": 0.8828125,
"learning_rate": 6.6025e-05,
"loss": 1.1259963989257813,
"step": 1360
},
{
"epoch": 0.1072094468614046,
"grad_norm": 0.921875,
"learning_rate": 6.552500000000001e-05,
"loss": 1.1311534881591796,
"step": 1380
},
{
"epoch": 0.10876320696084525,
"grad_norm": 0.9375,
"learning_rate": 6.502500000000001e-05,
"loss": 1.1099111557006835,
"step": 1400
},
{
"epoch": 0.11031696706028589,
"grad_norm": 0.953125,
"learning_rate": 6.4525e-05,
"loss": 1.126076602935791,
"step": 1420
},
{
"epoch": 0.11187072715972654,
"grad_norm": 0.8984375,
"learning_rate": 6.4025e-05,
"loss": 1.1338358879089356,
"step": 1440
},
{
"epoch": 0.11342448725916718,
"grad_norm": 0.91796875,
"learning_rate": 6.3525e-05,
"loss": 1.1149433135986329,
"step": 1460
},
{
"epoch": 0.11497824735860783,
"grad_norm": 0.96484375,
"learning_rate": 6.3025e-05,
"loss": 1.1221566200256348,
"step": 1480
},
{
"epoch": 0.11653200745804848,
"grad_norm": 0.921875,
"learning_rate": 6.2525e-05,
"loss": 1.1223237991333008,
"step": 1500
},
{
"epoch": 0.11808576755748912,
"grad_norm": 0.90234375,
"learning_rate": 6.2025e-05,
"loss": 1.1033407211303712,
"step": 1520
},
{
"epoch": 0.11963952765692977,
"grad_norm": 0.90234375,
"learning_rate": 6.1525e-05,
"loss": 1.1114818572998046,
"step": 1540
},
{
"epoch": 0.12119328775637042,
"grad_norm": 0.91796875,
"learning_rate": 6.1025e-05,
"loss": 1.1105637550354004,
"step": 1560
},
{
"epoch": 0.12274704785581106,
"grad_norm": 0.90625,
"learning_rate": 6.0525e-05,
"loss": 1.1189040184020995,
"step": 1580
},
{
"epoch": 0.12430080795525171,
"grad_norm": 0.921875,
"learning_rate": 6.0024999999999995e-05,
"loss": 1.1234063148498534,
"step": 1600
},
{
"epoch": 0.12585456805469236,
"grad_norm": 0.953125,
"learning_rate": 5.9525e-05,
"loss": 1.1020179748535157,
"step": 1620
},
{
"epoch": 0.127408328154133,
"grad_norm": 0.921875,
"learning_rate": 5.9025000000000005e-05,
"loss": 1.1260129928588867,
"step": 1640
},
{
"epoch": 0.12896208825357364,
"grad_norm": 0.87890625,
"learning_rate": 5.8525000000000006e-05,
"loss": 1.1067705154418945,
"step": 1660
},
{
"epoch": 0.1305158483530143,
"grad_norm": 0.94140625,
"learning_rate": 5.802500000000001e-05,
"loss": 1.115440273284912,
"step": 1680
},
{
"epoch": 0.13206960845245494,
"grad_norm": 0.91796875,
"learning_rate": 5.752500000000001e-05,
"loss": 1.1169689178466797,
"step": 1700
},
{
"epoch": 0.13362336855189558,
"grad_norm": 0.921875,
"learning_rate": 5.7025000000000004e-05,
"loss": 1.1162803649902344,
"step": 1720
},
{
"epoch": 0.13517712865133624,
"grad_norm": 0.8984375,
"learning_rate": 5.6525000000000005e-05,
"loss": 1.1266037940979003,
"step": 1740
},
{
"epoch": 0.13673088875077688,
"grad_norm": 0.953125,
"learning_rate": 5.6025000000000007e-05,
"loss": 1.1090587615966796,
"step": 1760
},
{
"epoch": 0.13828464885021752,
"grad_norm": 0.8828125,
"learning_rate": 5.552500000000001e-05,
"loss": 1.1141853332519531,
"step": 1780
},
{
"epoch": 0.13983840894965818,
"grad_norm": 0.88671875,
"learning_rate": 5.5025e-05,
"loss": 1.1379024505615234,
"step": 1800
},
{
"epoch": 0.14139216904909882,
"grad_norm": 0.91015625,
"learning_rate": 5.4525000000000004e-05,
"loss": 1.110361099243164,
"step": 1820
},
{
"epoch": 0.14294592914853946,
"grad_norm": 0.8984375,
"learning_rate": 5.4025000000000005e-05,
"loss": 1.1104223251342773,
"step": 1840
},
{
"epoch": 0.14449968924798012,
"grad_norm": 0.953125,
"learning_rate": 5.3525e-05,
"loss": 1.0993282318115234,
"step": 1860
},
{
"epoch": 0.14605344934742076,
"grad_norm": 0.88671875,
"learning_rate": 5.3025e-05,
"loss": 1.1151838302612305,
"step": 1880
},
{
"epoch": 0.1476072094468614,
"grad_norm": 0.91796875,
"learning_rate": 5.2525e-05,
"loss": 1.1184428215026856,
"step": 1900
},
{
"epoch": 0.14916096954630206,
"grad_norm": 0.921875,
"learning_rate": 5.2025000000000004e-05,
"loss": 1.1321413040161132,
"step": 1920
},
{
"epoch": 0.1507147296457427,
"grad_norm": 0.875,
"learning_rate": 5.1525e-05,
"loss": 1.1035722732543944,
"step": 1940
},
{
"epoch": 0.15226848974518334,
"grad_norm": 0.92578125,
"learning_rate": 5.1025e-05,
"loss": 1.0843469619750976,
"step": 1960
},
{
"epoch": 0.153822249844624,
"grad_norm": 0.89453125,
"learning_rate": 5.0525e-05,
"loss": 1.102191162109375,
"step": 1980
},
{
"epoch": 0.15537600994406464,
"grad_norm": 0.93359375,
"learning_rate": 5.0025e-05,
"loss": 1.1058999061584474,
"step": 2000
},
{
"epoch": 0.15692977004350528,
"grad_norm": 0.8984375,
"learning_rate": 4.9525000000000004e-05,
"loss": 1.1077130317687989,
"step": 2020
},
{
"epoch": 0.15848353014294592,
"grad_norm": 0.91796875,
"learning_rate": 4.9025000000000006e-05,
"loss": 1.0922195434570312,
"step": 2040
},
{
"epoch": 0.16003729024238658,
"grad_norm": 0.92578125,
"learning_rate": 4.8525e-05,
"loss": 1.130363941192627,
"step": 2060
},
{
"epoch": 0.16159105034182722,
"grad_norm": 0.8984375,
"learning_rate": 4.8025e-05,
"loss": 1.0666452407836915,
"step": 2080
},
{
"epoch": 0.16314481044126786,
"grad_norm": 0.91015625,
"learning_rate": 4.7525e-05,
"loss": 1.096773052215576,
"step": 2100
},
{
"epoch": 0.16469857054070852,
"grad_norm": 0.9296875,
"learning_rate": 4.7025000000000005e-05,
"loss": 1.0890857696533203,
"step": 2120
},
{
"epoch": 0.16625233064014916,
"grad_norm": 0.91015625,
"learning_rate": 4.6525e-05,
"loss": 1.1071590423583983,
"step": 2140
},
{
"epoch": 0.1678060907395898,
"grad_norm": 0.87890625,
"learning_rate": 4.6025e-05,
"loss": 1.1148256301879882,
"step": 2160
},
{
"epoch": 0.16935985083903046,
"grad_norm": 0.92578125,
"learning_rate": 4.5525e-05,
"loss": 1.090849018096924,
"step": 2180
},
{
"epoch": 0.1709136109384711,
"grad_norm": 0.921875,
"learning_rate": 4.5025000000000003e-05,
"loss": 1.1214483261108399,
"step": 2200
},
{
"epoch": 0.17246737103791174,
"grad_norm": 0.89453125,
"learning_rate": 4.4525e-05,
"loss": 1.099250030517578,
"step": 2220
},
{
"epoch": 0.1740211311373524,
"grad_norm": 0.90234375,
"learning_rate": 4.4025e-05,
"loss": 1.0906652450561523,
"step": 2240
},
{
"epoch": 0.17557489123679304,
"grad_norm": 0.875,
"learning_rate": 4.352500000000001e-05,
"loss": 1.0940235137939454,
"step": 2260
},
{
"epoch": 0.17712865133623368,
"grad_norm": 0.875,
"learning_rate": 4.3025e-05,
"loss": 1.0771520614624024,
"step": 2280
},
{
"epoch": 0.17868241143567434,
"grad_norm": 0.875,
"learning_rate": 4.2525000000000004e-05,
"loss": 1.0870559692382813,
"step": 2300
},
{
"epoch": 0.18023617153511498,
"grad_norm": 0.90625,
"learning_rate": 4.2025000000000005e-05,
"loss": 1.0863205909729003,
"step": 2320
},
{
"epoch": 0.18178993163455562,
"grad_norm": 0.9296875,
"learning_rate": 4.1525e-05,
"loss": 1.0996931076049805,
"step": 2340
},
{
"epoch": 0.18334369173399628,
"grad_norm": 0.875,
"learning_rate": 4.1025e-05,
"loss": 1.0987712860107421,
"step": 2360
},
{
"epoch": 0.18489745183343692,
"grad_norm": 0.9296875,
"learning_rate": 4.0525e-05,
"loss": 1.1171295166015625,
"step": 2380
},
{
"epoch": 0.18645121193287756,
"grad_norm": 0.9296875,
"learning_rate": 4.0025000000000004e-05,
"loss": 1.103907012939453,
"step": 2400
},
{
"epoch": 0.18800497203231822,
"grad_norm": 0.94140625,
"learning_rate": 3.9525e-05,
"loss": 1.090281867980957,
"step": 2420
},
{
"epoch": 0.18955873213175886,
"grad_norm": 0.9140625,
"learning_rate": 3.9025e-05,
"loss": 1.0880367279052734,
"step": 2440
},
{
"epoch": 0.1911124922311995,
"grad_norm": 0.90625,
"learning_rate": 3.8525e-05,
"loss": 1.0839120864868164,
"step": 2460
},
{
"epoch": 0.19266625233064014,
"grad_norm": 0.9375,
"learning_rate": 3.8025e-05,
"loss": 1.1069268226623534,
"step": 2480
},
{
"epoch": 0.1942200124300808,
"grad_norm": 0.87109375,
"learning_rate": 3.7525e-05,
"loss": 1.0701780319213867,
"step": 2500
},
{
"epoch": 0.19577377252952144,
"grad_norm": 0.921875,
"learning_rate": 3.7025000000000005e-05,
"loss": 1.1017258644104004,
"step": 2520
},
{
"epoch": 0.19732753262896208,
"grad_norm": 0.9140625,
"learning_rate": 3.652500000000001e-05,
"loss": 1.1134037017822265,
"step": 2540
},
{
"epoch": 0.19888129272840274,
"grad_norm": 0.9453125,
"learning_rate": 3.6025e-05,
"loss": 1.0741355895996094,
"step": 2560
},
{
"epoch": 0.20043505282784338,
"grad_norm": 0.890625,
"learning_rate": 3.5525e-05,
"loss": 1.0729135513305663,
"step": 2580
},
{
"epoch": 0.20198881292728402,
"grad_norm": 0.921875,
"learning_rate": 3.5025000000000004e-05,
"loss": 1.0833280563354493,
"step": 2600
},
{
"epoch": 0.20354257302672468,
"grad_norm": 0.8984375,
"learning_rate": 3.4525e-05,
"loss": 1.1003621101379395,
"step": 2620
},
{
"epoch": 0.20509633312616532,
"grad_norm": 0.8984375,
"learning_rate": 3.4025e-05,
"loss": 1.0958992004394532,
"step": 2640
},
{
"epoch": 0.20665009322560596,
"grad_norm": 0.90625,
"learning_rate": 3.3525e-05,
"loss": 1.098832893371582,
"step": 2660
},
{
"epoch": 0.20820385332504662,
"grad_norm": 0.89453125,
"learning_rate": 3.3025e-05,
"loss": 1.0823541641235352,
"step": 2680
},
{
"epoch": 0.20975761342448726,
"grad_norm": 0.91015625,
"learning_rate": 3.2525e-05,
"loss": 1.0849027633666992,
"step": 2700
},
{
"epoch": 0.2113113735239279,
"grad_norm": 0.890625,
"learning_rate": 3.2025e-05,
"loss": 1.0959321975708007,
"step": 2720
},
{
"epoch": 0.21286513362336856,
"grad_norm": 0.88671875,
"learning_rate": 3.1525e-05,
"loss": 1.083869743347168,
"step": 2740
},
{
"epoch": 0.2144188937228092,
"grad_norm": 0.8984375,
"learning_rate": 3.1025e-05,
"loss": 1.0912399291992188,
"step": 2760
},
{
"epoch": 0.21597265382224984,
"grad_norm": 0.9140625,
"learning_rate": 3.0525e-05,
"loss": 1.088266944885254,
"step": 2780
},
{
"epoch": 0.2175264139216905,
"grad_norm": 0.86328125,
"learning_rate": 3.0025000000000005e-05,
"loss": 1.0917407035827638,
"step": 2800
},
{
"epoch": 0.21908017402113114,
"grad_norm": 0.88671875,
"learning_rate": 2.9525000000000003e-05,
"loss": 1.088615608215332,
"step": 2820
},
{
"epoch": 0.22063393412057178,
"grad_norm": 0.89453125,
"learning_rate": 2.9025e-05,
"loss": 1.0885720252990723,
"step": 2840
},
{
"epoch": 0.22218769422001244,
"grad_norm": 0.8828125,
"learning_rate": 2.8525000000000002e-05,
"loss": 1.072688388824463,
"step": 2860
},
{
"epoch": 0.22374145431945308,
"grad_norm": 0.89453125,
"learning_rate": 2.8025e-05,
"loss": 1.0892706871032716,
"step": 2880
},
{
"epoch": 0.22529521441889372,
"grad_norm": 0.93359375,
"learning_rate": 2.7525e-05,
"loss": 1.083917236328125,
"step": 2900
},
{
"epoch": 0.22684897451833436,
"grad_norm": 0.921875,
"learning_rate": 2.7025e-05,
"loss": 1.1079219818115233,
"step": 2920
},
{
"epoch": 0.22840273461777502,
"grad_norm": 0.91796875,
"learning_rate": 2.6525e-05,
"loss": 1.0851898193359375,
"step": 2940
},
{
"epoch": 0.22995649471721566,
"grad_norm": 0.91015625,
"learning_rate": 2.6025e-05,
"loss": 1.0698064804077148,
"step": 2960
},
{
"epoch": 0.2315102548166563,
"grad_norm": 0.8828125,
"learning_rate": 2.5525e-05,
"loss": 1.0661302566528321,
"step": 2980
},
{
"epoch": 0.23306401491609696,
"grad_norm": 0.9140625,
"learning_rate": 2.5025e-05,
"loss": 1.0647387504577637,
"step": 3000
},
{
"epoch": 0.2346177750155376,
"grad_norm": 0.8984375,
"learning_rate": 2.4525e-05,
"loss": 1.074313259124756,
"step": 3020
},
{
"epoch": 0.23617153511497824,
"grad_norm": 0.9140625,
"learning_rate": 2.4025e-05,
"loss": 1.0964820861816407,
"step": 3040
},
{
"epoch": 0.2377252952144189,
"grad_norm": 0.89453125,
"learning_rate": 2.3525e-05,
"loss": 1.0810314178466798,
"step": 3060
},
{
"epoch": 0.23927905531385954,
"grad_norm": 0.9296875,
"learning_rate": 2.3025e-05,
"loss": 1.078106689453125,
"step": 3080
},
{
"epoch": 0.24083281541330018,
"grad_norm": 0.90234375,
"learning_rate": 2.2525000000000002e-05,
"loss": 1.0860605239868164,
"step": 3100
},
{
"epoch": 0.24238657551274084,
"grad_norm": 0.88671875,
"learning_rate": 2.2025e-05,
"loss": 1.0743337631225587,
"step": 3120
},
{
"epoch": 0.24394033561218148,
"grad_norm": 0.88671875,
"learning_rate": 2.1525e-05,
"loss": 1.0778118133544923,
"step": 3140
},
{
"epoch": 0.24549409571162212,
"grad_norm": 0.9296875,
"learning_rate": 2.1025e-05,
"loss": 1.0981425285339355,
"step": 3160
},
{
"epoch": 0.24704785581106278,
"grad_norm": 0.91015625,
"learning_rate": 2.0525e-05,
"loss": 1.0836584091186523,
"step": 3180
},
{
"epoch": 0.24860161591050342,
"grad_norm": 0.8671875,
"learning_rate": 2.0025000000000002e-05,
"loss": 1.089561367034912,
"step": 3200
},
{
"epoch": 0.2501553760099441,
"grad_norm": 0.9296875,
"learning_rate": 1.9525e-05,
"loss": 1.0788454055786132,
"step": 3220
},
{
"epoch": 0.2517091361093847,
"grad_norm": 0.91015625,
"learning_rate": 1.9025e-05,
"loss": 1.078543758392334,
"step": 3240
},
{
"epoch": 0.25326289620882536,
"grad_norm": 0.9140625,
"learning_rate": 1.8525e-05,
"loss": 1.1045896530151367,
"step": 3260
},
{
"epoch": 0.254816656308266,
"grad_norm": 0.88671875,
"learning_rate": 1.8025e-05,
"loss": 1.0857179641723633,
"step": 3280
},
{
"epoch": 0.25637041640770664,
"grad_norm": 0.87109375,
"learning_rate": 1.7525e-05,
"loss": 1.0980566024780274,
"step": 3300
},
{
"epoch": 0.2579241765071473,
"grad_norm": 0.90234375,
"learning_rate": 1.7025e-05,
"loss": 1.0898794174194335,
"step": 3320
},
{
"epoch": 0.25947793660658797,
"grad_norm": 0.93359375,
"learning_rate": 1.6525000000000002e-05,
"loss": 1.0813383102416991,
"step": 3340
},
{
"epoch": 0.2610316967060286,
"grad_norm": 0.9140625,
"learning_rate": 1.6025e-05,
"loss": 1.0852994918823242,
"step": 3360
},
{
"epoch": 0.26258545680546924,
"grad_norm": 0.9296875,
"learning_rate": 1.5525e-05,
"loss": 1.0736425399780274,
"step": 3380
},
{
"epoch": 0.2641392169049099,
"grad_norm": 0.87890625,
"learning_rate": 1.5025000000000001e-05,
"loss": 1.0873353004455566,
"step": 3400
},
{
"epoch": 0.2656929770043505,
"grad_norm": 0.90625,
"learning_rate": 1.4524999999999999e-05,
"loss": 1.0847795486450196,
"step": 3420
},
{
"epoch": 0.26724673710379115,
"grad_norm": 0.90234375,
"learning_rate": 1.4025000000000002e-05,
"loss": 1.0983787536621095,
"step": 3440
},
{
"epoch": 0.26880049720323185,
"grad_norm": 0.8828125,
"learning_rate": 1.3525000000000002e-05,
"loss": 1.0719210624694824,
"step": 3460
},
{
"epoch": 0.2703542573026725,
"grad_norm": 0.87890625,
"learning_rate": 1.3025000000000002e-05,
"loss": 1.0850942611694336,
"step": 3480
},
{
"epoch": 0.2719080174021131,
"grad_norm": 0.890625,
"learning_rate": 1.2525000000000001e-05,
"loss": 1.0767460823059083,
"step": 3500
},
{
"epoch": 0.27346177750155376,
"grad_norm": 0.90234375,
"learning_rate": 1.2025000000000001e-05,
"loss": 1.0981364250183105,
"step": 3520
},
{
"epoch": 0.2750155376009944,
"grad_norm": 0.90234375,
"learning_rate": 1.1525e-05,
"loss": 1.0713367462158203,
"step": 3540
},
{
"epoch": 0.27656929770043504,
"grad_norm": 0.8984375,
"learning_rate": 1.1025e-05,
"loss": 1.074637222290039,
"step": 3560
},
{
"epoch": 0.2781230577998757,
"grad_norm": 0.91015625,
"learning_rate": 1.0525e-05,
"loss": 1.0917478561401368,
"step": 3580
},
{
"epoch": 0.27967681789931637,
"grad_norm": 0.8828125,
"learning_rate": 1.0025000000000001e-05,
"loss": 1.0934381484985352,
"step": 3600
},
{
"epoch": 0.281230577998757,
"grad_norm": 0.953125,
"learning_rate": 9.525000000000001e-06,
"loss": 1.0998394966125489,
"step": 3620
},
{
"epoch": 0.28278433809819764,
"grad_norm": 0.8828125,
"learning_rate": 9.025e-06,
"loss": 1.087621021270752,
"step": 3640
},
{
"epoch": 0.2843380981976383,
"grad_norm": 0.91015625,
"learning_rate": 8.525e-06,
"loss": 1.091860866546631,
"step": 3660
},
{
"epoch": 0.2858918582970789,
"grad_norm": 0.9375,
"learning_rate": 8.025e-06,
"loss": 1.072180938720703,
"step": 3680
},
{
"epoch": 0.28744561839651955,
"grad_norm": 0.8828125,
"learning_rate": 7.525e-06,
"loss": 1.0778998374938964,
"step": 3700
},
{
"epoch": 0.28899937849596025,
"grad_norm": 0.875,
"learning_rate": 7.025000000000001e-06,
"loss": 1.0709803581237793,
"step": 3720
},
{
"epoch": 0.2905531385954009,
"grad_norm": 0.91015625,
"learning_rate": 6.525e-06,
"loss": 1.1010093688964844,
"step": 3740
},
{
"epoch": 0.2921068986948415,
"grad_norm": 0.91015625,
"learning_rate": 6.025e-06,
"loss": 1.0703121185302735,
"step": 3760
},
{
"epoch": 0.29366065879428216,
"grad_norm": 0.88671875,
"learning_rate": 5.5250000000000005e-06,
"loss": 1.0773000717163086,
"step": 3780
},
{
"epoch": 0.2952144188937228,
"grad_norm": 0.91796875,
"learning_rate": 5.025e-06,
"loss": 1.0894964218139649,
"step": 3800
},
{
"epoch": 0.29676817899316343,
"grad_norm": 0.87890625,
"learning_rate": 4.525e-06,
"loss": 1.060502815246582,
"step": 3820
},
{
"epoch": 0.2983219390926041,
"grad_norm": 0.890625,
"learning_rate": 4.0250000000000004e-06,
"loss": 1.0844976425170898,
"step": 3840
},
{
"epoch": 0.29987569919204476,
"grad_norm": 0.90625,
"learning_rate": 3.5249999999999997e-06,
"loss": 1.068390464782715,
"step": 3860
},
{
"epoch": 0.3014294592914854,
"grad_norm": 0.8828125,
"learning_rate": 3.0250000000000003e-06,
"loss": 1.0733464241027832,
"step": 3880
},
{
"epoch": 0.30298321939092604,
"grad_norm": 0.92578125,
"learning_rate": 2.5250000000000004e-06,
"loss": 1.0655000686645508,
"step": 3900
},
{
"epoch": 0.3045369794903667,
"grad_norm": 0.88671875,
"learning_rate": 2.025e-06,
"loss": 1.0655412673950195,
"step": 3920
},
{
"epoch": 0.3060907395898073,
"grad_norm": 0.90234375,
"learning_rate": 1.525e-06,
"loss": 1.0558664321899414,
"step": 3940
},
{
"epoch": 0.307644499689248,
"grad_norm": 0.94140625,
"learning_rate": 1.0250000000000001e-06,
"loss": 1.1024866104125977,
"step": 3960
},
{
"epoch": 0.30919825978868865,
"grad_norm": 0.91015625,
"learning_rate": 5.250000000000001e-07,
"loss": 1.0651491165161133,
"step": 3980
},
{
"epoch": 0.3107520198881293,
"grad_norm": 0.9609375,
"learning_rate": 2.5000000000000002e-08,
"loss": 1.0959912300109864,
"step": 4000
}
],
"logging_steps": 20,
"max_steps": 4000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 9.3750546137088e+17,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}