{
"best_metric": 0.9536241841291653,
"best_model_checkpoint": "melanoma-v2\\checkpoint-8535",
"epoch": 5.0,
"eval_steps": 500,
"global_step": 8535,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.005858230814294083,
"grad_norm": 11.994911193847656,
"learning_rate": 4.098360655737705e-06,
"loss": 0.6539,
"step": 10
},
{
"epoch": 0.011716461628588167,
"grad_norm": 18.7818603515625,
"learning_rate": 9.36768149882904e-06,
"loss": 0.6538,
"step": 20
},
{
"epoch": 0.01757469244288225,
"grad_norm": 7.247508525848389,
"learning_rate": 1.5222482435597189e-05,
"loss": 0.648,
"step": 30
},
{
"epoch": 0.023432923257176334,
"grad_norm": 23.158416748046875,
"learning_rate": 2.107728337236534e-05,
"loss": 0.6352,
"step": 40
},
{
"epoch": 0.029291154071470416,
"grad_norm": 12.200428009033203,
"learning_rate": 2.693208430913349e-05,
"loss": 0.609,
"step": 50
},
{
"epoch": 0.0351493848857645,
"grad_norm": 10.321218490600586,
"learning_rate": 3.278688524590164e-05,
"loss": 0.6022,
"step": 60
},
{
"epoch": 0.041007615700058585,
"grad_norm": 35.08419418334961,
"learning_rate": 3.864168618266979e-05,
"loss": 0.5575,
"step": 70
},
{
"epoch": 0.04686584651435267,
"grad_norm": 27.759618759155273,
"learning_rate": 4.3911007025761124e-05,
"loss": 0.514,
"step": 80
},
{
"epoch": 0.05272407732864675,
"grad_norm": 16.956911087036133,
"learning_rate": 4.9180327868852456e-05,
"loss": 0.5048,
"step": 90
},
{
"epoch": 0.05858230814294083,
"grad_norm": 23.36929702758789,
"learning_rate": 5.5035128805620606e-05,
"loss": 0.4818,
"step": 100
},
{
"epoch": 0.06444053895723492,
"grad_norm": 35.85573196411133,
"learning_rate": 6.0889929742388756e-05,
"loss": 0.4162,
"step": 110
},
{
"epoch": 0.070298769771529,
"grad_norm": 23.913671493530273,
"learning_rate": 6.674473067915692e-05,
"loss": 0.4388,
"step": 120
},
{
"epoch": 0.07615700058582309,
"grad_norm": 8.763222694396973,
"learning_rate": 7.259953161592506e-05,
"loss": 0.3439,
"step": 130
},
{
"epoch": 0.08201523140011717,
"grad_norm": 8.551299095153809,
"learning_rate": 7.845433255269322e-05,
"loss": 0.3399,
"step": 140
},
{
"epoch": 0.08787346221441125,
"grad_norm": 36.60219955444336,
"learning_rate": 8.430913348946136e-05,
"loss": 0.353,
"step": 150
},
{
"epoch": 0.09373169302870533,
"grad_norm": 16.17277717590332,
"learning_rate": 9.016393442622952e-05,
"loss": 0.3191,
"step": 160
},
{
"epoch": 0.09958992384299942,
"grad_norm": 22.695581436157227,
"learning_rate": 9.601873536299766e-05,
"loss": 0.3274,
"step": 170
},
{
"epoch": 0.1054481546572935,
"grad_norm": 14.668584823608398,
"learning_rate": 0.00010187353629976582,
"loss": 0.206,
"step": 180
},
{
"epoch": 0.11130638547158758,
"grad_norm": 10.050411224365234,
"learning_rate": 0.00010772833723653396,
"loss": 0.2838,
"step": 190
},
{
"epoch": 0.11716461628588166,
"grad_norm": 8.341797828674316,
"learning_rate": 0.0001135831381733021,
"loss": 0.2025,
"step": 200
},
{
"epoch": 0.12302284710017575,
"grad_norm": 12.642868995666504,
"learning_rate": 0.00011943793911007025,
"loss": 0.2302,
"step": 210
},
{
"epoch": 0.12888107791446984,
"grad_norm": 6.499959945678711,
"learning_rate": 0.00012529274004683842,
"loss": 0.1666,
"step": 220
},
{
"epoch": 0.1347393087287639,
"grad_norm": 26.433914184570312,
"learning_rate": 0.00013114754098360657,
"loss": 0.2277,
"step": 230
},
{
"epoch": 0.140597539543058,
"grad_norm": 22.857593536376953,
"learning_rate": 0.00013700234192037472,
"loss": 0.2807,
"step": 240
},
{
"epoch": 0.14645577035735208,
"grad_norm": 6.599829196929932,
"learning_rate": 0.00014285714285714284,
"loss": 0.218,
"step": 250
},
{
"epoch": 0.15231400117164617,
"grad_norm": 14.18704605102539,
"learning_rate": 0.00014871194379391102,
"loss": 0.2214,
"step": 260
},
{
"epoch": 0.15817223198594024,
"grad_norm": 6.61388635635376,
"learning_rate": 0.00015456674473067917,
"loss": 0.1327,
"step": 270
},
{
"epoch": 0.16403046280023434,
"grad_norm": 7.001180171966553,
"learning_rate": 0.00016042154566744732,
"loss": 0.1527,
"step": 280
},
{
"epoch": 0.1698886936145284,
"grad_norm": 17.781526565551758,
"learning_rate": 0.00016627634660421544,
"loss": 0.2506,
"step": 290
},
{
"epoch": 0.1757469244288225,
"grad_norm": 11.289440155029297,
"learning_rate": 0.00017213114754098362,
"loss": 0.2424,
"step": 300
},
{
"epoch": 0.18160515524311657,
"grad_norm": 14.80530834197998,
"learning_rate": 0.00017798594847775177,
"loss": 0.2481,
"step": 310
},
{
"epoch": 0.18746338605741067,
"grad_norm": 5.781233310699463,
"learning_rate": 0.00018384074941451992,
"loss": 0.203,
"step": 320
},
{
"epoch": 0.19332161687170474,
"grad_norm": 4.2908477783203125,
"learning_rate": 0.00018969555035128804,
"loss": 0.1923,
"step": 330
},
{
"epoch": 0.19917984768599883,
"grad_norm": 3.176772117614746,
"learning_rate": 0.00019555035128805622,
"loss": 0.1573,
"step": 340
},
{
"epoch": 0.2050380785002929,
"grad_norm": 22.84259605407715,
"learning_rate": 0.00020140515222482437,
"loss": 0.2,
"step": 350
},
{
"epoch": 0.210896309314587,
"grad_norm": 3.60316801071167,
"learning_rate": 0.00020725995316159252,
"loss": 0.2558,
"step": 360
},
{
"epoch": 0.21675454012888107,
"grad_norm": 15.83247184753418,
"learning_rate": 0.00021311475409836064,
"loss": 0.1953,
"step": 370
},
{
"epoch": 0.22261277094317516,
"grad_norm": 3.053990602493286,
"learning_rate": 0.00021896955503512882,
"loss": 0.2094,
"step": 380
},
{
"epoch": 0.22847100175746923,
"grad_norm": 4.581020355224609,
"learning_rate": 0.00022482435597189697,
"loss": 0.166,
"step": 390
},
{
"epoch": 0.23432923257176333,
"grad_norm": 4.517653465270996,
"learning_rate": 0.00023067915690866512,
"loss": 0.1749,
"step": 400
},
{
"epoch": 0.2401874633860574,
"grad_norm": 15.956161499023438,
"learning_rate": 0.00023653395784543327,
"loss": 0.2458,
"step": 410
},
{
"epoch": 0.2460456942003515,
"grad_norm": 3.738513231277466,
"learning_rate": 0.0002423887587822014,
"loss": 0.2622,
"step": 420
},
{
"epoch": 0.2519039250146456,
"grad_norm": 4.261665344238281,
"learning_rate": 0.00024824355971896957,
"loss": 0.21,
"step": 430
},
{
"epoch": 0.2577621558289397,
"grad_norm": 15.049210548400879,
"learning_rate": 0.0002540983606557377,
"loss": 0.2058,
"step": 440
},
{
"epoch": 0.26362038664323373,
"grad_norm": 2.9831387996673584,
"learning_rate": 0.00025995316159250587,
"loss": 0.1936,
"step": 450
},
{
"epoch": 0.2694786174575278,
"grad_norm": 7.315589904785156,
"learning_rate": 0.000265807962529274,
"loss": 0.2204,
"step": 460
},
{
"epoch": 0.2753368482718219,
"grad_norm": 9.33542251586914,
"learning_rate": 0.00027166276346604217,
"loss": 0.2543,
"step": 470
},
{
"epoch": 0.281195079086116,
"grad_norm": 4.937420845031738,
"learning_rate": 0.00027751756440281026,
"loss": 0.2941,
"step": 480
},
{
"epoch": 0.28705330990041006,
"grad_norm": 1.6123785972595215,
"learning_rate": 0.00028337236533957846,
"loss": 0.2231,
"step": 490
},
{
"epoch": 0.29291154071470415,
"grad_norm": 2.655095338821411,
"learning_rate": 0.0002892271662763466,
"loss": 0.16,
"step": 500
},
{
"epoch": 0.29876977152899825,
"grad_norm": 5.088364601135254,
"learning_rate": 0.00029508196721311476,
"loss": 0.2443,
"step": 510
},
{
"epoch": 0.30462800234329235,
"grad_norm": 13.077324867248535,
"learning_rate": 0.0003009367681498829,
"loss": 0.2526,
"step": 520
},
{
"epoch": 0.3104862331575864,
"grad_norm": 3.225377082824707,
"learning_rate": 0.00030679156908665106,
"loss": 0.2461,
"step": 530
},
{
"epoch": 0.3163444639718805,
"grad_norm": 2.392651319503784,
"learning_rate": 0.0003126463700234192,
"loss": 0.2216,
"step": 540
},
{
"epoch": 0.3222026947861746,
"grad_norm": 1.9468374252319336,
"learning_rate": 0.00031850117096018736,
"loss": 0.1632,
"step": 550
},
{
"epoch": 0.3280609256004687,
"grad_norm": 1.429714560508728,
"learning_rate": 0.0003243559718969555,
"loss": 0.232,
"step": 560
},
{
"epoch": 0.3339191564147627,
"grad_norm": 2.8054699897766113,
"learning_rate": 0.00033021077283372366,
"loss": 0.1954,
"step": 570
},
{
"epoch": 0.3397773872290568,
"grad_norm": 3.628207206726074,
"learning_rate": 0.0003360655737704918,
"loss": 0.1579,
"step": 580
},
{
"epoch": 0.3456356180433509,
"grad_norm": 1.0688583850860596,
"learning_rate": 0.00034192037470725996,
"loss": 0.2699,
"step": 590
},
{
"epoch": 0.351493848857645,
"grad_norm": 5.8670220375061035,
"learning_rate": 0.0003477751756440281,
"loss": 0.2075,
"step": 600
},
{
"epoch": 0.35735207967193905,
"grad_norm": 2.418936252593994,
"learning_rate": 0.00035362997658079626,
"loss": 0.2845,
"step": 610
},
{
"epoch": 0.36321031048623315,
"grad_norm": 1.8158751726150513,
"learning_rate": 0.0003594847775175644,
"loss": 0.1336,
"step": 620
},
{
"epoch": 0.36906854130052724,
"grad_norm": 3.2167751789093018,
"learning_rate": 0.00036533957845433256,
"loss": 0.1885,
"step": 630
},
{
"epoch": 0.37492677211482134,
"grad_norm": 0.4845765233039856,
"learning_rate": 0.0003711943793911007,
"loss": 0.179,
"step": 640
},
{
"epoch": 0.38078500292911543,
"grad_norm": 1.4025683403015137,
"learning_rate": 0.0003770491803278688,
"loss": 0.1934,
"step": 650
},
{
"epoch": 0.3866432337434095,
"grad_norm": 3.7113711833953857,
"learning_rate": 0.000382903981264637,
"loss": 0.2392,
"step": 660
},
{
"epoch": 0.39250146455770357,
"grad_norm": 0.9925859570503235,
"learning_rate": 0.00038875878220140516,
"loss": 0.1762,
"step": 670
},
{
"epoch": 0.39835969537199767,
"grad_norm": 2.3232357501983643,
"learning_rate": 0.0003946135831381733,
"loss": 0.3168,
"step": 680
},
{
"epoch": 0.40421792618629176,
"grad_norm": 2.2389273643493652,
"learning_rate": 0.00040046838407494146,
"loss": 0.1604,
"step": 690
},
{
"epoch": 0.4100761570005858,
"grad_norm": 3.8855361938476562,
"learning_rate": 0.0004063231850117096,
"loss": 0.1886,
"step": 700
},
{
"epoch": 0.4159343878148799,
"grad_norm": 3.099923610687256,
"learning_rate": 0.00041217798594847776,
"loss": 0.2951,
"step": 710
},
{
"epoch": 0.421792618629174,
"grad_norm": 4.365396022796631,
"learning_rate": 0.0004180327868852459,
"loss": 0.1872,
"step": 720
},
{
"epoch": 0.4276508494434681,
"grad_norm": 1.8989065885543823,
"learning_rate": 0.00042388758782201406,
"loss": 0.204,
"step": 730
},
{
"epoch": 0.43350908025776214,
"grad_norm": 1.8920644521713257,
"learning_rate": 0.0004297423887587822,
"loss": 0.2229,
"step": 740
},
{
"epoch": 0.43936731107205623,
"grad_norm": 2.187156915664673,
"learning_rate": 0.00043559718969555036,
"loss": 0.1378,
"step": 750
},
{
"epoch": 0.44522554188635033,
"grad_norm": 3.7022440433502197,
"learning_rate": 0.0004414519906323185,
"loss": 0.2533,
"step": 760
},
{
"epoch": 0.4510837727006444,
"grad_norm": 2.7125163078308105,
"learning_rate": 0.00044730679156908666,
"loss": 0.1884,
"step": 770
},
{
"epoch": 0.45694200351493847,
"grad_norm": 3.3647499084472656,
"learning_rate": 0.0004531615925058548,
"loss": 0.2834,
"step": 780
},
{
"epoch": 0.46280023432923256,
"grad_norm": 0.8961579203605652,
"learning_rate": 0.00045901639344262296,
"loss": 0.228,
"step": 790
},
{
"epoch": 0.46865846514352666,
"grad_norm": 1.321513056755066,
"learning_rate": 0.0004648711943793911,
"loss": 0.1791,
"step": 800
},
{
"epoch": 0.47451669595782076,
"grad_norm": 1.7761132717132568,
"learning_rate": 0.00047072599531615926,
"loss": 0.2495,
"step": 810
},
{
"epoch": 0.4803749267721148,
"grad_norm": 2.2934768199920654,
"learning_rate": 0.0004765807962529274,
"loss": 0.201,
"step": 820
},
{
"epoch": 0.4862331575864089,
"grad_norm": 2.120138645172119,
"learning_rate": 0.00048243559718969556,
"loss": 0.2572,
"step": 830
},
{
"epoch": 0.492091388400703,
"grad_norm": 1.623761534690857,
"learning_rate": 0.0004882903981264637,
"loss": 0.2508,
"step": 840
},
{
"epoch": 0.4979496192149971,
"grad_norm": 1.8615626096725464,
"learning_rate": 0.0004941451990632318,
"loss": 0.2244,
"step": 850
},
{
"epoch": 0.5038078500292912,
"grad_norm": 1.1661752462387085,
"learning_rate": 0.0005,
"loss": 0.258,
"step": 860
},
{
"epoch": 0.5096660808435852,
"grad_norm": 1.3036870956420898,
"learning_rate": 0.0004993490430933472,
"loss": 0.2324,
"step": 870
},
{
"epoch": 0.5155243116578794,
"grad_norm": 3.418734073638916,
"learning_rate": 0.0004986980861866944,
"loss": 0.2483,
"step": 880
},
{
"epoch": 0.5213825424721734,
"grad_norm": 1.805756688117981,
"learning_rate": 0.0004980471292800417,
"loss": 0.1911,
"step": 890
},
{
"epoch": 0.5272407732864675,
"grad_norm": 0.6934542655944824,
"learning_rate": 0.0004973961723733889,
"loss": 0.2273,
"step": 900
},
{
"epoch": 0.5330990041007616,
"grad_norm": 1.0683369636535645,
"learning_rate": 0.0004967452154667361,
"loss": 0.1814,
"step": 910
},
{
"epoch": 0.5389572349150556,
"grad_norm": 0.795211672782898,
"learning_rate": 0.0004960942585600834,
"loss": 0.2065,
"step": 920
},
{
"epoch": 0.5448154657293497,
"grad_norm": 0.7547960877418518,
"learning_rate": 0.0004954433016534306,
"loss": 0.147,
"step": 930
},
{
"epoch": 0.5506736965436438,
"grad_norm": 0.7984468340873718,
"learning_rate": 0.0004947923447467778,
"loss": 0.2025,
"step": 940
},
{
"epoch": 0.5565319273579379,
"grad_norm": 0.9543431997299194,
"learning_rate": 0.000494141387840125,
"loss": 0.2016,
"step": 950
},
{
"epoch": 0.562390158172232,
"grad_norm": 0.5806155800819397,
"learning_rate": 0.0004934904309334722,
"loss": 0.1749,
"step": 960
},
{
"epoch": 0.5682483889865261,
"grad_norm": 0.5972631573677063,
"learning_rate": 0.0004928394740268194,
"loss": 0.176,
"step": 970
},
{
"epoch": 0.5741066198008201,
"grad_norm": 0.4694497585296631,
"learning_rate": 0.0004921885171201666,
"loss": 0.2667,
"step": 980
},
{
"epoch": 0.5799648506151143,
"grad_norm": 0.84099942445755,
"learning_rate": 0.0004915375602135138,
"loss": 0.1912,
"step": 990
},
{
"epoch": 0.5858230814294083,
"grad_norm": 2.690175771713257,
"learning_rate": 0.0004908866033068611,
"loss": 0.1685,
"step": 1000
},
{
"epoch": 0.5916813122437024,
"grad_norm": 1.2537461519241333,
"learning_rate": 0.0004902356464002083,
"loss": 0.2628,
"step": 1010
},
{
"epoch": 0.5975395430579965,
"grad_norm": 3.283203363418579,
"learning_rate": 0.0004895846894935555,
"loss": 0.2186,
"step": 1020
},
{
"epoch": 0.6033977738722905,
"grad_norm": 1.1899466514587402,
"learning_rate": 0.0004889337325869028,
"loss": 0.178,
"step": 1030
},
{
"epoch": 0.6092560046865847,
"grad_norm": 2.0233116149902344,
"learning_rate": 0.00048828277568025,
"loss": 0.2323,
"step": 1040
},
{
"epoch": 0.6151142355008787,
"grad_norm": 1.075803279876709,
"learning_rate": 0.0004876318187735972,
"loss": 0.1844,
"step": 1050
},
{
"epoch": 0.6209724663151728,
"grad_norm": 1.9664595127105713,
"learning_rate": 0.0004869808618669444,
"loss": 0.1697,
"step": 1060
},
{
"epoch": 0.6268306971294669,
"grad_norm": 0.497203528881073,
"learning_rate": 0.0004863299049602916,
"loss": 0.1803,
"step": 1070
},
{
"epoch": 0.632688927943761,
"grad_norm": 1.3227007389068604,
"learning_rate": 0.00048567894805363887,
"loss": 0.2132,
"step": 1080
},
{
"epoch": 0.6385471587580551,
"grad_norm": 1.1600663661956787,
"learning_rate": 0.0004850279911469861,
"loss": 0.2151,
"step": 1090
},
{
"epoch": 0.6444053895723492,
"grad_norm": 1.4555237293243408,
"learning_rate": 0.0004843770342403333,
"loss": 0.2224,
"step": 1100
},
{
"epoch": 0.6502636203866432,
"grad_norm": 1.7892956733703613,
"learning_rate": 0.0004837260773336805,
"loss": 0.2004,
"step": 1110
},
{
"epoch": 0.6561218512009374,
"grad_norm": 0.6487146615982056,
"learning_rate": 0.00048307512042702774,
"loss": 0.136,
"step": 1120
},
{
"epoch": 0.6619800820152314,
"grad_norm": 1.258460521697998,
"learning_rate": 0.00048242416352037494,
"loss": 0.1574,
"step": 1130
},
{
"epoch": 0.6678383128295254,
"grad_norm": 1.852683424949646,
"learning_rate": 0.0004817732066137222,
"loss": 0.1983,
"step": 1140
},
{
"epoch": 0.6736965436438196,
"grad_norm": 0.7946725487709045,
"learning_rate": 0.0004811222497070694,
"loss": 0.2243,
"step": 1150
},
{
"epoch": 0.6795547744581136,
"grad_norm": 2.0183424949645996,
"learning_rate": 0.0004804712928004166,
"loss": 0.2063,
"step": 1160
},
{
"epoch": 0.6854130052724078,
"grad_norm": 1.2471932172775269,
"learning_rate": 0.00047982033589376386,
"loss": 0.1675,
"step": 1170
},
{
"epoch": 0.6912712360867018,
"grad_norm": 1.7430349588394165,
"learning_rate": 0.00047916937898711105,
"loss": 0.1867,
"step": 1180
},
{
"epoch": 0.6971294669009959,
"grad_norm": 0.6978306770324707,
"learning_rate": 0.00047851842208045825,
"loss": 0.1756,
"step": 1190
},
{
"epoch": 0.70298769771529,
"grad_norm": 1.8298044204711914,
"learning_rate": 0.00047786746517380555,
"loss": 0.199,
"step": 1200
},
{
"epoch": 0.7088459285295841,
"grad_norm": 1.6273216009140015,
"learning_rate": 0.00047721650826715274,
"loss": 0.2137,
"step": 1210
},
{
"epoch": 0.7147041593438781,
"grad_norm": 0.8626360893249512,
"learning_rate": 0.00047656555136049993,
"loss": 0.1602,
"step": 1220
},
{
"epoch": 0.7205623901581723,
"grad_norm": 1.0214688777923584,
"learning_rate": 0.0004759145944538472,
"loss": 0.2219,
"step": 1230
},
{
"epoch": 0.7264206209724663,
"grad_norm": 1.6713732481002808,
"learning_rate": 0.00047526363754719437,
"loss": 0.2086,
"step": 1240
},
{
"epoch": 0.7322788517867604,
"grad_norm": 1.6285948753356934,
"learning_rate": 0.00047461268064054156,
"loss": 0.1886,
"step": 1250
},
{
"epoch": 0.7381370826010545,
"grad_norm": 1.1279932260513306,
"learning_rate": 0.00047396172373388886,
"loss": 0.1786,
"step": 1260
},
{
"epoch": 0.7439953134153485,
"grad_norm": 1.1999080181121826,
"learning_rate": 0.00047331076682723605,
"loss": 0.2515,
"step": 1270
},
{
"epoch": 0.7498535442296427,
"grad_norm": 0.6995332837104797,
"learning_rate": 0.0004726598099205833,
"loss": 0.193,
"step": 1280
},
{
"epoch": 0.7557117750439367,
"grad_norm": 1.1866198778152466,
"learning_rate": 0.0004720088530139305,
"loss": 0.1641,
"step": 1290
},
{
"epoch": 0.7615700058582309,
"grad_norm": 0.7438950538635254,
"learning_rate": 0.0004713578961072777,
"loss": 0.2166,
"step": 1300
},
{
"epoch": 0.7674282366725249,
"grad_norm": 1.5525139570236206,
"learning_rate": 0.0004707069392006249,
"loss": 0.1733,
"step": 1310
},
{
"epoch": 0.773286467486819,
"grad_norm": 1.763756275177002,
"learning_rate": 0.00047005598229397217,
"loss": 0.1777,
"step": 1320
},
{
"epoch": 0.7791446983011131,
"grad_norm": 1.4447243213653564,
"learning_rate": 0.00046940502538731936,
"loss": 0.1423,
"step": 1330
},
{
"epoch": 0.7850029291154071,
"grad_norm": 0.3512236177921295,
"learning_rate": 0.0004687540684806666,
"loss": 0.2049,
"step": 1340
},
{
"epoch": 0.7908611599297012,
"grad_norm": 1.6187587976455688,
"learning_rate": 0.0004681031115740138,
"loss": 0.2369,
"step": 1350
},
{
"epoch": 0.7967193907439953,
"grad_norm": 1.3473628759384155,
"learning_rate": 0.00046745215466736104,
"loss": 0.2218,
"step": 1360
},
{
"epoch": 0.8025776215582894,
"grad_norm": 0.4788994789123535,
"learning_rate": 0.00046680119776070823,
"loss": 0.1477,
"step": 1370
},
{
"epoch": 0.8084358523725835,
"grad_norm": 1.1703928709030151,
"learning_rate": 0.0004661502408540555,
"loss": 0.2457,
"step": 1380
},
{
"epoch": 0.8142940831868776,
"grad_norm": 1.6234064102172852,
"learning_rate": 0.0004654992839474027,
"loss": 0.232,
"step": 1390
},
{
"epoch": 0.8201523140011716,
"grad_norm": 0.4102720618247986,
"learning_rate": 0.0004648483270407499,
"loss": 0.1856,
"step": 1400
},
{
"epoch": 0.8260105448154658,
"grad_norm": 0.49339228868484497,
"learning_rate": 0.0004641973701340971,
"loss": 0.1961,
"step": 1410
},
{
"epoch": 0.8318687756297598,
"grad_norm": 1.422866702079773,
"learning_rate": 0.00046354641322744435,
"loss": 0.2002,
"step": 1420
},
{
"epoch": 0.8377270064440538,
"grad_norm": 2.2886526584625244,
"learning_rate": 0.00046289545632079154,
"loss": 0.2098,
"step": 1430
},
{
"epoch": 0.843585237258348,
"grad_norm": 1.019761085510254,
"learning_rate": 0.00046224449941413884,
"loss": 0.1656,
"step": 1440
},
{
"epoch": 0.849443468072642,
"grad_norm": 0.9850839972496033,
"learning_rate": 0.00046159354250748603,
"loss": 0.1725,
"step": 1450
},
{
"epoch": 0.8553016988869362,
"grad_norm": 0.8339518308639526,
"learning_rate": 0.0004609425856008332,
"loss": 0.1312,
"step": 1460
},
{
"epoch": 0.8611599297012302,
"grad_norm": 2.1276755332946777,
"learning_rate": 0.00046029162869418047,
"loss": 0.1281,
"step": 1470
},
{
"epoch": 0.8670181605155243,
"grad_norm": 2.526759386062622,
"learning_rate": 0.00045964067178752766,
"loss": 0.2324,
"step": 1480
},
{
"epoch": 0.8728763913298184,
"grad_norm": 1.4450796842575073,
"learning_rate": 0.00045898971488087485,
"loss": 0.191,
"step": 1490
},
{
"epoch": 0.8787346221441125,
"grad_norm": 1.1786580085754395,
"learning_rate": 0.00045833875797422215,
"loss": 0.2706,
"step": 1500
},
{
"epoch": 0.8845928529584065,
"grad_norm": 1.5052456855773926,
"learning_rate": 0.00045768780106756934,
"loss": 0.2679,
"step": 1510
},
{
"epoch": 0.8904510837727007,
"grad_norm": 0.3879900872707367,
"learning_rate": 0.0004570368441609166,
"loss": 0.1693,
"step": 1520
},
{
"epoch": 0.8963093145869947,
"grad_norm": 1.613844394683838,
"learning_rate": 0.0004563858872542638,
"loss": 0.2008,
"step": 1530
},
{
"epoch": 0.9021675454012889,
"grad_norm": 1.3180102109909058,
"learning_rate": 0.00045573493034761097,
"loss": 0.2123,
"step": 1540
},
{
"epoch": 0.9080257762155829,
"grad_norm": 1.0241358280181885,
"learning_rate": 0.0004550839734409582,
"loss": 0.153,
"step": 1550
},
{
"epoch": 0.9138840070298769,
"grad_norm": 0.9837117791175842,
"learning_rate": 0.00045443301653430546,
"loss": 0.2067,
"step": 1560
},
{
"epoch": 0.9197422378441711,
"grad_norm": 0.6446996927261353,
"learning_rate": 0.00045378205962765266,
"loss": 0.1446,
"step": 1570
},
{
"epoch": 0.9256004686584651,
"grad_norm": 0.733072817325592,
"learning_rate": 0.0004531311027209999,
"loss": 0.1972,
"step": 1580
},
{
"epoch": 0.9314586994727593,
"grad_norm": 1.2420257329940796,
"learning_rate": 0.0004524801458143471,
"loss": 0.1854,
"step": 1590
},
{
"epoch": 0.9373169302870533,
"grad_norm": 2.1636710166931152,
"learning_rate": 0.00045182918890769434,
"loss": 0.2689,
"step": 1600
},
{
"epoch": 0.9431751611013474,
"grad_norm": 0.833899199962616,
"learning_rate": 0.00045117823200104153,
"loss": 0.1944,
"step": 1610
},
{
"epoch": 0.9490333919156415,
"grad_norm": 0.98338782787323,
"learning_rate": 0.0004505272750943887,
"loss": 0.1568,
"step": 1620
},
{
"epoch": 0.9548916227299356,
"grad_norm": 0.2556711435317993,
"learning_rate": 0.000449876318187736,
"loss": 0.145,
"step": 1630
},
{
"epoch": 0.9607498535442296,
"grad_norm": 1.3900483846664429,
"learning_rate": 0.0004492253612810832,
"loss": 0.1975,
"step": 1640
},
{
"epoch": 0.9666080843585237,
"grad_norm": 0.9346416592597961,
"learning_rate": 0.0004485744043744304,
"loss": 0.2289,
"step": 1650
},
{
"epoch": 0.9724663151728178,
"grad_norm": 0.9890295267105103,
"learning_rate": 0.00044792344746777765,
"loss": 0.1623,
"step": 1660
},
{
"epoch": 0.9783245459871119,
"grad_norm": 0.3821526765823364,
"learning_rate": 0.00044727249056112484,
"loss": 0.1841,
"step": 1670
},
{
"epoch": 0.984182776801406,
"grad_norm": 0.9447836875915527,
"learning_rate": 0.00044662153365447203,
"loss": 0.155,
"step": 1680
},
{
"epoch": 0.9900410076157,
"grad_norm": 1.4631438255310059,
"learning_rate": 0.00044597057674781933,
"loss": 0.1878,
"step": 1690
},
{
"epoch": 0.9958992384299942,
"grad_norm": 0.7002029418945312,
"learning_rate": 0.0004453196198411665,
"loss": 0.1585,
"step": 1700
},
{
"epoch": 1.0,
"eval_f1": 0.8884904983865185,
"eval_loss": 0.29340454936027527,
"eval_runtime": 15.1829,
"eval_samples_per_second": 197.591,
"eval_steps_per_second": 6.191,
"step": 1707
},
{
"epoch": 1.0017574692442883,
"grad_norm": 1.4538880586624146,
"learning_rate": 0.00044466866293451377,
"loss": 0.1659,
"step": 1710
},
{
"epoch": 1.0076157000585824,
"grad_norm": 1.1730880737304688,
"learning_rate": 0.00044401770602786096,
"loss": 0.2061,
"step": 1720
},
{
"epoch": 1.0134739308728764,
"grad_norm": 1.1373745203018188,
"learning_rate": 0.00044336674912120815,
"loss": 0.1307,
"step": 1730
},
{
"epoch": 1.0193321616871704,
"grad_norm": 1.5837347507476807,
"learning_rate": 0.0004427157922145554,
"loss": 0.1837,
"step": 1740
},
{
"epoch": 1.0251903925014645,
"grad_norm": 1.2740920782089233,
"learning_rate": 0.00044206483530790264,
"loss": 0.2228,
"step": 1750
},
{
"epoch": 1.0310486233157587,
"grad_norm": 0.7164502739906311,
"learning_rate": 0.00044141387840124983,
"loss": 0.1628,
"step": 1760
},
{
"epoch": 1.0369068541300528,
"grad_norm": 0.4816601872444153,
"learning_rate": 0.0004407629214945971,
"loss": 0.1548,
"step": 1770
},
{
"epoch": 1.0427650849443468,
"grad_norm": 2.0663034915924072,
"learning_rate": 0.00044011196458794427,
"loss": 0.1997,
"step": 1780
},
{
"epoch": 1.0486233157586409,
"grad_norm": 0.48207902908325195,
"learning_rate": 0.0004394610076812915,
"loss": 0.1376,
"step": 1790
},
{
"epoch": 1.054481546572935,
"grad_norm": 1.8158009052276611,
"learning_rate": 0.0004388100507746387,
"loss": 0.1746,
"step": 1800
},
{
"epoch": 1.060339777387229,
"grad_norm": 0.8723987936973572,
"learning_rate": 0.00043815909386798595,
"loss": 0.182,
"step": 1810
},
{
"epoch": 1.0661980082015232,
"grad_norm": 1.8085635900497437,
"learning_rate": 0.0004375081369613332,
"loss": 0.1686,
"step": 1820
},
{
"epoch": 1.0720562390158173,
"grad_norm": 1.800154447555542,
"learning_rate": 0.0004368571800546804,
"loss": 0.1744,
"step": 1830
},
{
"epoch": 1.0779144698301113,
"grad_norm": 0.9144867062568665,
"learning_rate": 0.0004362062231480276,
"loss": 0.1776,
"step": 1840
},
{
"epoch": 1.0837727006444053,
"grad_norm": 1.4381457567214966,
"learning_rate": 0.0004355552662413748,
"loss": 0.1944,
"step": 1850
},
{
"epoch": 1.0896309314586994,
"grad_norm": 0.435210645198822,
"learning_rate": 0.000434904309334722,
"loss": 0.1903,
"step": 1860
},
{
"epoch": 1.0954891622729936,
"grad_norm": 1.2425928115844727,
"learning_rate": 0.0004342533524280693,
"loss": 0.1455,
"step": 1870
},
{
"epoch": 1.1013473930872877,
"grad_norm": 1.3132634162902832,
"learning_rate": 0.0004336023955214165,
"loss": 0.1694,
"step": 1880
},
{
"epoch": 1.1072056239015817,
"grad_norm": 0.8136222958564758,
"learning_rate": 0.0004329514386147637,
"loss": 0.1287,
"step": 1890
},
{
"epoch": 1.1130638547158758,
"grad_norm": 1.0159581899642944,
"learning_rate": 0.00043230048170811095,
"loss": 0.1716,
"step": 1900
},
{
"epoch": 1.1189220855301698,
"grad_norm": 1.047573208808899,
"learning_rate": 0.00043164952480145814,
"loss": 0.203,
"step": 1910
},
{
"epoch": 1.124780316344464,
"grad_norm": 0.5778195858001709,
"learning_rate": 0.00043099856789480533,
"loss": 0.1901,
"step": 1920
},
{
"epoch": 1.1306385471587581,
"grad_norm": 0.5892320871353149,
"learning_rate": 0.00043034761098815263,
"loss": 0.1872,
"step": 1930
},
{
"epoch": 1.1364967779730522,
"grad_norm": 0.8339682817459106,
"learning_rate": 0.0004296966540814998,
"loss": 0.1555,
"step": 1940
},
{
"epoch": 1.1423550087873462,
"grad_norm": 1.1029021739959717,
"learning_rate": 0.00042904569717484706,
"loss": 0.1283,
"step": 1950
},
{
"epoch": 1.1482132396016402,
"grad_norm": 0.9936773180961609,
"learning_rate": 0.00042839474026819426,
"loss": 0.157,
"step": 1960
},
{
"epoch": 1.1540714704159343,
"grad_norm": 1.108543038368225,
"learning_rate": 0.00042774378336154145,
"loss": 0.1472,
"step": 1970
},
{
"epoch": 1.1599297012302285,
"grad_norm": 0.6184877157211304,
"learning_rate": 0.0004270928264548887,
"loss": 0.127,
"step": 1980
},
{
"epoch": 1.1657879320445226,
"grad_norm": 0.9069430828094482,
"learning_rate": 0.00042644186954823594,
"loss": 0.1197,
"step": 1990
},
{
"epoch": 1.1716461628588166,
"grad_norm": 1.8886994123458862,
"learning_rate": 0.00042579091264158313,
"loss": 0.1614,
"step": 2000
},
{
"epoch": 1.1775043936731107,
"grad_norm": 0.9473972916603088,
"learning_rate": 0.0004251399557349304,
"loss": 0.1793,
"step": 2010
},
{
"epoch": 1.1833626244874047,
"grad_norm": 0.49200665950775146,
"learning_rate": 0.00042448899882827757,
"loss": 0.1558,
"step": 2020
},
{
"epoch": 1.189220855301699,
"grad_norm": 1.1897913217544556,
"learning_rate": 0.00042383804192162476,
"loss": 0.1287,
"step": 2030
},
{
"epoch": 1.195079086115993,
"grad_norm": 0.4664163291454315,
"learning_rate": 0.000423187085014972,
"loss": 0.1385,
"step": 2040
},
{
"epoch": 1.200937316930287,
"grad_norm": 0.6468362808227539,
"learning_rate": 0.00042253612810831925,
"loss": 0.1646,
"step": 2050
},
{
"epoch": 1.206795547744581,
"grad_norm": 0.9662730097770691,
"learning_rate": 0.0004218851712016665,
"loss": 0.1461,
"step": 2060
},
{
"epoch": 1.2126537785588751,
"grad_norm": 1.2626930475234985,
"learning_rate": 0.0004212342142950137,
"loss": 0.1373,
"step": 2070
},
{
"epoch": 1.2185120093731694,
"grad_norm": 2.0867109298706055,
"learning_rate": 0.0004205832573883609,
"loss": 0.2043,
"step": 2080
},
{
"epoch": 1.2243702401874634,
"grad_norm": 1.0699323415756226,
"learning_rate": 0.0004199323004817081,
"loss": 0.1493,
"step": 2090
},
{
"epoch": 1.2302284710017575,
"grad_norm": 0.5708653926849365,
"learning_rate": 0.0004192813435750553,
"loss": 0.1596,
"step": 2100
},
{
"epoch": 1.2360867018160515,
"grad_norm": 1.6544972658157349,
"learning_rate": 0.00041863038666840256,
"loss": 0.1858,
"step": 2110
},
{
"epoch": 1.2419449326303456,
"grad_norm": 1.7309073209762573,
"learning_rate": 0.0004179794297617498,
"loss": 0.093,
"step": 2120
},
{
"epoch": 1.2478031634446398,
"grad_norm": 1.0835295915603638,
"learning_rate": 0.000417328472855097,
"loss": 0.1629,
"step": 2130
},
{
"epoch": 1.2536613942589339,
"grad_norm": 0.587459146976471,
"learning_rate": 0.00041667751594844424,
"loss": 0.1727,
"step": 2140
},
{
"epoch": 1.259519625073228,
"grad_norm": 3.7887632846832275,
"learning_rate": 0.00041602655904179143,
"loss": 0.1967,
"step": 2150
},
{
"epoch": 1.265377855887522,
"grad_norm": 1.1685255765914917,
"learning_rate": 0.0004153756021351386,
"loss": 0.1311,
"step": 2160
},
{
"epoch": 1.271236086701816,
"grad_norm": 0.7206849455833435,
"learning_rate": 0.0004147246452284859,
"loss": 0.1448,
"step": 2170
},
{
"epoch": 1.2770943175161102,
"grad_norm": 1.402601718902588,
"learning_rate": 0.0004140736883218331,
"loss": 0.1841,
"step": 2180
},
{
"epoch": 1.2829525483304043,
"grad_norm": 0.5288113355636597,
"learning_rate": 0.0004134227314151803,
"loss": 0.0906,
"step": 2190
},
{
"epoch": 1.2888107791446983,
"grad_norm": 1.5257741212844849,
"learning_rate": 0.00041277177450852755,
"loss": 0.1277,
"step": 2200
},
{
"epoch": 1.2946690099589924,
"grad_norm": 0.6375269889831543,
"learning_rate": 0.00041212081760187474,
"loss": 0.1615,
"step": 2210
},
{
"epoch": 1.3005272407732864,
"grad_norm": 1.8483799695968628,
"learning_rate": 0.000411469860695222,
"loss": 0.1615,
"step": 2220
},
{
"epoch": 1.3063854715875807,
"grad_norm": 0.9884814620018005,
"learning_rate": 0.00041081890378856924,
"loss": 0.1591,
"step": 2230
},
{
"epoch": 1.3122437024018747,
"grad_norm": 1.186396598815918,
"learning_rate": 0.0004101679468819164,
"loss": 0.1757,
"step": 2240
},
{
"epoch": 1.3181019332161688,
"grad_norm": 0.8369488716125488,
"learning_rate": 0.00040951698997526367,
"loss": 0.1475,
"step": 2250
},
{
"epoch": 1.3239601640304628,
"grad_norm": 1.4090021848678589,
"learning_rate": 0.00040886603306861086,
"loss": 0.1246,
"step": 2260
},
{
"epoch": 1.3298183948447568,
"grad_norm": 2.212960958480835,
"learning_rate": 0.00040821507616195805,
"loss": 0.2006,
"step": 2270
},
{
"epoch": 1.335676625659051,
"grad_norm": 1.3161710500717163,
"learning_rate": 0.0004075641192553053,
"loss": 0.1085,
"step": 2280
},
{
"epoch": 1.341534856473345,
"grad_norm": 2.699848175048828,
"learning_rate": 0.00040691316234865255,
"loss": 0.1698,
"step": 2290
},
{
"epoch": 1.3473930872876392,
"grad_norm": 0.8176013827323914,
"learning_rate": 0.0004062622054419998,
"loss": 0.0913,
"step": 2300
},
{
"epoch": 1.3532513181019332,
"grad_norm": 1.959524393081665,
"learning_rate": 0.000405611248535347,
"loss": 0.1192,
"step": 2310
},
{
"epoch": 1.3591095489162273,
"grad_norm": 1.9907140731811523,
"learning_rate": 0.0004049602916286942,
"loss": 0.1565,
"step": 2320
},
{
"epoch": 1.3649677797305213,
"grad_norm": 0.7778204679489136,
"learning_rate": 0.0004043093347220414,
"loss": 0.1835,
"step": 2330
},
{
"epoch": 1.3708260105448153,
"grad_norm": 0.8291440606117249,
"learning_rate": 0.0004036583778153886,
"loss": 0.1627,
"step": 2340
},
{
"epoch": 1.3766842413591096,
"grad_norm": 0.6740533709526062,
"learning_rate": 0.00040300742090873586,
"loss": 0.1645,
"step": 2350
},
{
"epoch": 1.3825424721734036,
"grad_norm": 1.1750842332839966,
"learning_rate": 0.0004023564640020831,
"loss": 0.1404,
"step": 2360
},
{
"epoch": 1.3884007029876977,
"grad_norm": 0.880556046962738,
"learning_rate": 0.0004017055070954303,
"loss": 0.1242,
"step": 2370
},
{
"epoch": 1.3942589338019917,
"grad_norm": 0.932043731212616,
"learning_rate": 0.0004010545501887775,
"loss": 0.1217,
"step": 2380
},
{
"epoch": 1.4001171646162858,
"grad_norm": 0.5808772444725037,
"learning_rate": 0.00040040359328212473,
"loss": 0.2381,
"step": 2390
},
{
"epoch": 1.40597539543058,
"grad_norm": 0.8846039175987244,
"learning_rate": 0.0003997526363754719,
"loss": 0.1702,
"step": 2400
},
{
"epoch": 1.411833626244874,
"grad_norm": 2.054563522338867,
"learning_rate": 0.00039910167946881917,
"loss": 0.1562,
"step": 2410
},
{
"epoch": 1.4176918570591681,
"grad_norm": 1.1283624172210693,
"learning_rate": 0.0003984507225621664,
"loss": 0.2018,
"step": 2420
},
{
"epoch": 1.4235500878734622,
"grad_norm": 0.28652748465538025,
"learning_rate": 0.0003977997656555136,
"loss": 0.1612,
"step": 2430
},
{
"epoch": 1.4294083186877562,
"grad_norm": 2.846034288406372,
"learning_rate": 0.00039714880874886085,
"loss": 0.1342,
"step": 2440
},
{
"epoch": 1.4352665495020505,
"grad_norm": 2.90832257270813,
"learning_rate": 0.00039649785184220804,
"loss": 0.1695,
"step": 2450
},
{
"epoch": 1.4411247803163445,
"grad_norm": 0.8565723299980164,
"learning_rate": 0.00039584689493555523,
"loss": 0.1446,
"step": 2460
},
{
"epoch": 1.4469830111306385,
"grad_norm": 0.6863169074058533,
"learning_rate": 0.0003951959380289025,
"loss": 0.1666,
"step": 2470
},
{
"epoch": 1.4528412419449326,
"grad_norm": 1.357521414756775,
"learning_rate": 0.0003945449811222497,
"loss": 0.1354,
"step": 2480
},
{
"epoch": 1.4586994727592266,
"grad_norm": 0.3757944703102112,
"learning_rate": 0.00039389402421559697,
"loss": 0.1386,
"step": 2490
},
{
"epoch": 1.4645577035735209,
"grad_norm": 0.5341873168945312,
"learning_rate": 0.00039324306730894416,
"loss": 0.1479,
"step": 2500
},
{
"epoch": 1.470415934387815,
"grad_norm": 0.9452828168869019,
"learning_rate": 0.00039259211040229135,
"loss": 0.0944,
"step": 2510
},
{
"epoch": 1.476274165202109,
"grad_norm": 0.904435932636261,
"learning_rate": 0.0003919411534956386,
"loss": 0.0872,
"step": 2520
},
{
"epoch": 1.482132396016403,
"grad_norm": 1.0896387100219727,
"learning_rate": 0.0003912901965889858,
"loss": 0.1463,
"step": 2530
},
{
"epoch": 1.487990626830697,
"grad_norm": 0.9536682367324829,
"learning_rate": 0.00039063923968233303,
"loss": 0.1748,
"step": 2540
},
{
"epoch": 1.4938488576449913,
"grad_norm": 0.42995181679725647,
"learning_rate": 0.0003899882827756803,
"loss": 0.1005,
"step": 2550
},
{
"epoch": 1.4997070884592854,
"grad_norm": 1.3272085189819336,
"learning_rate": 0.00038933732586902747,
"loss": 0.1755,
"step": 2560
},
{
"epoch": 1.5055653192735794,
"grad_norm": 1.5230518579483032,
"learning_rate": 0.0003886863689623747,
"loss": 0.137,
"step": 2570
},
{
"epoch": 1.5114235500878734,
"grad_norm": 1.9343452453613281,
"learning_rate": 0.0003880354120557219,
"loss": 0.0864,
"step": 2580
},
{
"epoch": 1.5172817809021675,
"grad_norm": 0.27782219648361206,
"learning_rate": 0.0003873844551490691,
"loss": 0.232,
"step": 2590
},
{
"epoch": 1.5231400117164617,
"grad_norm": 0.78903728723526,
"learning_rate": 0.0003867334982424164,
"loss": 0.1586,
"step": 2600
},
{
"epoch": 1.5289982425307556,
"grad_norm": 1.1951757669448853,
"learning_rate": 0.0003860825413357636,
"loss": 0.2101,
"step": 2610
},
{
"epoch": 1.5348564733450498,
"grad_norm": 0.6688250303268433,
"learning_rate": 0.0003854315844291108,
"loss": 0.1457,
"step": 2620
},
{
"epoch": 1.5407147041593439,
"grad_norm": 0.8118303418159485,
"learning_rate": 0.000384780627522458,
"loss": 0.1941,
"step": 2630
},
{
"epoch": 1.546572934973638,
"grad_norm": 0.43793728947639465,
"learning_rate": 0.0003841296706158052,
"loss": 0.139,
"step": 2640
},
{
"epoch": 1.5524311657879322,
"grad_norm": 0.8637521862983704,
"learning_rate": 0.00038347871370915246,
"loss": 0.1452,
"step": 2650
},
{
"epoch": 1.558289396602226,
"grad_norm": 1.656689167022705,
"learning_rate": 0.0003828277568024997,
"loss": 0.1937,
"step": 2660
},
{
"epoch": 1.5641476274165202,
"grad_norm": 1.5395698547363281,
"learning_rate": 0.0003821767998958469,
"loss": 0.1465,
"step": 2670
},
{
"epoch": 1.5700058582308143,
"grad_norm": 0.8242401480674744,
"learning_rate": 0.00038152584298919415,
"loss": 0.1497,
"step": 2680
},
{
"epoch": 1.5758640890451083,
"grad_norm": 1.000875473022461,
"learning_rate": 0.00038087488608254134,
"loss": 0.1878,
"step": 2690
},
{
"epoch": 1.5817223198594026,
"grad_norm": 0.8596873879432678,
"learning_rate": 0.00038022392917588853,
"loss": 0.1509,
"step": 2700
},
{
"epoch": 1.5875805506736964,
"grad_norm": 2.0601985454559326,
"learning_rate": 0.0003795729722692358,
"loss": 0.1647,
"step": 2710
},
{
"epoch": 1.5934387814879907,
"grad_norm": 1.012863278388977,
"learning_rate": 0.000378922015362583,
"loss": 0.1352,
"step": 2720
},
{
"epoch": 1.5992970123022847,
"grad_norm": 1.4908140897750854,
"learning_rate": 0.00037827105845593027,
"loss": 0.1741,
"step": 2730
},
{
"epoch": 1.6051552431165788,
"grad_norm": 0.9020434021949768,
"learning_rate": 0.00037762010154927746,
"loss": 0.1376,
"step": 2740
},
{
"epoch": 1.611013473930873,
"grad_norm": 0.7156019806861877,
"learning_rate": 0.00037696914464262465,
"loss": 0.1685,
"step": 2750
},
{
"epoch": 1.6168717047451668,
"grad_norm": 1.503300428390503,
"learning_rate": 0.0003763181877359719,
"loss": 0.1478,
"step": 2760
},
{
"epoch": 1.622729935559461,
"grad_norm": 0.546312153339386,
"learning_rate": 0.0003756672308293191,
"loss": 0.1061,
"step": 2770
},
{
"epoch": 1.6285881663737551,
"grad_norm": 0.4983227550983429,
"learning_rate": 0.00037501627392266633,
"loss": 0.1315,
"step": 2780
},
{
"epoch": 1.6344463971880492,
"grad_norm": 0.6810446381568909,
"learning_rate": 0.0003743653170160136,
"loss": 0.1199,
"step": 2790
},
{
"epoch": 1.6403046280023434,
"grad_norm": 1.2596421241760254,
"learning_rate": 0.00037371436010936077,
"loss": 0.1201,
"step": 2800
},
{
"epoch": 1.6461628588166373,
"grad_norm": 0.9648050665855408,
"learning_rate": 0.00037306340320270796,
"loss": 0.1358,
"step": 2810
},
{
"epoch": 1.6520210896309315,
"grad_norm": 1.0759117603302002,
"learning_rate": 0.0003724124462960552,
"loss": 0.1664,
"step": 2820
},
{
"epoch": 1.6578793204452256,
"grad_norm": 1.0369553565979004,
"learning_rate": 0.0003717614893894024,
"loss": 0.1737,
"step": 2830
},
{
"epoch": 1.6637375512595196,
"grad_norm": 1.0915820598602295,
"learning_rate": 0.0003711105324827497,
"loss": 0.1355,
"step": 2840
},
{
"epoch": 1.6695957820738139,
"grad_norm": 1.3576059341430664,
"learning_rate": 0.0003704595755760969,
"loss": 0.1674,
"step": 2850
},
{
"epoch": 1.6754540128881077,
"grad_norm": 0.8195040822029114,
"learning_rate": 0.0003698086186694441,
"loss": 0.1284,
"step": 2860
},
{
"epoch": 1.681312243702402,
"grad_norm": 1.8879438638687134,
"learning_rate": 0.0003691576617627913,
"loss": 0.1336,
"step": 2870
},
{
"epoch": 1.687170474516696,
"grad_norm": 0.7765256762504578,
"learning_rate": 0.0003685067048561385,
"loss": 0.1347,
"step": 2880
},
{
"epoch": 1.69302870533099,
"grad_norm": 1.017264723777771,
"learning_rate": 0.0003678557479494857,
"loss": 0.1548,
"step": 2890
},
{
"epoch": 1.698886936145284,
"grad_norm": 0.42928430438041687,
"learning_rate": 0.000367204791042833,
"loss": 0.1526,
"step": 2900
},
{
"epoch": 1.7047451669595781,
"grad_norm": 0.75179123878479,
"learning_rate": 0.0003665538341361802,
"loss": 0.1415,
"step": 2910
},
{
"epoch": 1.7106033977738724,
"grad_norm": 1.2041255235671997,
"learning_rate": 0.00036590287722952744,
"loss": 0.1065,
"step": 2920
},
{
"epoch": 1.7164616285881664,
"grad_norm": 1.6273362636566162,
"learning_rate": 0.00036525192032287463,
"loss": 0.1683,
"step": 2930
},
{
"epoch": 1.7223198594024605,
"grad_norm": 1.5621204376220703,
"learning_rate": 0.0003646009634162218,
"loss": 0.1626,
"step": 2940
},
{
"epoch": 1.7281780902167545,
"grad_norm": 1.275732398033142,
"learning_rate": 0.00036395000650956907,
"loss": 0.1235,
"step": 2950
},
{
"epoch": 1.7340363210310485,
"grad_norm": 1.7712377309799194,
"learning_rate": 0.0003632990496029163,
"loss": 0.1851,
"step": 2960
},
{
"epoch": 1.7398945518453428,
"grad_norm": 0.7929120659828186,
"learning_rate": 0.0003626480926962635,
"loss": 0.1422,
"step": 2970
},
{
"epoch": 1.7457527826596366,
"grad_norm": 0.8228936791419983,
"learning_rate": 0.00036199713578961075,
"loss": 0.1494,
"step": 2980
},
{
"epoch": 1.751611013473931,
"grad_norm": 1.4195990562438965,
"learning_rate": 0.00036134617888295795,
"loss": 0.1466,
"step": 2990
},
{
"epoch": 1.757469244288225,
"grad_norm": 0.2758101522922516,
"learning_rate": 0.0003606952219763052,
"loss": 0.1443,
"step": 3000
},
{
"epoch": 1.763327475102519,
"grad_norm": 0.9897528886795044,
"learning_rate": 0.0003600442650696524,
"loss": 0.1148,
"step": 3010
},
{
"epoch": 1.7691857059168132,
"grad_norm": 0.1884101778268814,
"learning_rate": 0.00035939330816299963,
"loss": 0.0635,
"step": 3020
},
{
"epoch": 1.775043936731107,
"grad_norm": 1.4347259998321533,
"learning_rate": 0.0003587423512563469,
"loss": 0.1919,
"step": 3030
},
{
"epoch": 1.7809021675454013,
"grad_norm": 0.8466237783432007,
"learning_rate": 0.00035809139434969406,
"loss": 0.1725,
"step": 3040
},
{
"epoch": 1.7867603983596954,
"grad_norm": 1.203354001045227,
"learning_rate": 0.00035744043744304126,
"loss": 0.1351,
"step": 3050
},
{
"epoch": 1.7926186291739894,
"grad_norm": 0.47008630633354187,
"learning_rate": 0.0003567894805363885,
"loss": 0.1546,
"step": 3060
},
{
"epoch": 1.7984768599882837,
"grad_norm": 1.0304838418960571,
"learning_rate": 0.0003561385236297357,
"loss": 0.1989,
"step": 3070
},
{
"epoch": 1.8043350908025775,
"grad_norm": 0.6836739182472229,
"learning_rate": 0.000355487566723083,
"loss": 0.1482,
"step": 3080
},
{
"epoch": 1.8101933216168717,
"grad_norm": 1.5532166957855225,
"learning_rate": 0.0003548366098164302,
"loss": 0.2138,
"step": 3090
},
{
"epoch": 1.8160515524311658,
"grad_norm": 0.4863007962703705,
"learning_rate": 0.0003541856529097774,
"loss": 0.1219,
"step": 3100
},
{
"epoch": 1.8219097832454598,
"grad_norm": 0.6631212830543518,
"learning_rate": 0.0003535346960031246,
"loss": 0.1539,
"step": 3110
},
{
"epoch": 1.827768014059754,
"grad_norm": 0.7438946962356567,
"learning_rate": 0.0003528837390964718,
"loss": 0.1615,
"step": 3120
},
{
"epoch": 1.833626244874048,
"grad_norm": 1.0698827505111694,
"learning_rate": 0.000352232782189819,
"loss": 0.2044,
"step": 3130
},
{
"epoch": 1.8394844756883422,
"grad_norm": 0.41544845700263977,
"learning_rate": 0.00035158182528316625,
"loss": 0.1379,
"step": 3140
},
{
"epoch": 1.8453427065026362,
"grad_norm": 1.023944616317749,
"learning_rate": 0.0003509308683765135,
"loss": 0.1466,
"step": 3150
},
{
"epoch": 1.8512009373169303,
"grad_norm": 0.8214076161384583,
"learning_rate": 0.0003502799114698607,
"loss": 0.1181,
"step": 3160
},
{
"epoch": 1.8570591681312245,
"grad_norm": 1.027405858039856,
"learning_rate": 0.00034962895456320793,
"loss": 0.1083,
"step": 3170
},
{
"epoch": 1.8629173989455183,
"grad_norm": 3.0552237033843994,
"learning_rate": 0.0003489779976565551,
"loss": 0.1734,
"step": 3180
},
{
"epoch": 1.8687756297598126,
"grad_norm": 1.0937753915786743,
"learning_rate": 0.00034832704074990237,
"loss": 0.1142,
"step": 3190
},
{
"epoch": 1.8746338605741066,
"grad_norm": 1.0819835662841797,
"learning_rate": 0.00034767608384324956,
"loss": 0.1135,
"step": 3200
},
{
"epoch": 1.8804920913884007,
"grad_norm": 1.1981408596038818,
"learning_rate": 0.0003470251269365968,
"loss": 0.1087,
"step": 3210
},
{
"epoch": 1.886350322202695,
"grad_norm": 1.8808680772781372,
"learning_rate": 0.00034637417002994405,
"loss": 0.1477,
"step": 3220
},
{
"epoch": 1.8922085530169888,
"grad_norm": 1.3297356367111206,
"learning_rate": 0.00034572321312329124,
"loss": 0.1422,
"step": 3230
},
{
"epoch": 1.898066783831283,
"grad_norm": 1.0454509258270264,
"learning_rate": 0.00034507225621663843,
"loss": 0.1246,
"step": 3240
},
{
"epoch": 1.903925014645577,
"grad_norm": 0.9511450529098511,
"learning_rate": 0.0003444212993099857,
"loss": 0.1102,
"step": 3250
},
{
"epoch": 1.909783245459871,
"grad_norm": 1.416601300239563,
"learning_rate": 0.00034377034240333287,
"loss": 0.1576,
"step": 3260
},
{
"epoch": 1.9156414762741654,
"grad_norm": 0.8354918956756592,
"learning_rate": 0.00034311938549668017,
"loss": 0.1596,
"step": 3270
},
{
"epoch": 1.9214997070884592,
"grad_norm": 1.1035876274108887,
"learning_rate": 0.00034246842859002736,
"loss": 0.1418,
"step": 3280
},
{
"epoch": 1.9273579379027534,
"grad_norm": 1.3299360275268555,
"learning_rate": 0.00034181747168337455,
"loss": 0.1353,
"step": 3290
},
{
"epoch": 1.9332161687170475,
"grad_norm": 1.3646624088287354,
"learning_rate": 0.0003411665147767218,
"loss": 0.1488,
"step": 3300
},
{
"epoch": 1.9390743995313415,
"grad_norm": 1.2904136180877686,
"learning_rate": 0.000340515557870069,
"loss": 0.14,
"step": 3310
},
{
"epoch": 1.9449326303456356,
"grad_norm": 0.9034157991409302,
"learning_rate": 0.0003398646009634162,
"loss": 0.1954,
"step": 3320
},
{
"epoch": 1.9507908611599296,
"grad_norm": 0.7134235501289368,
"learning_rate": 0.0003392136440567635,
"loss": 0.1842,
"step": 3330
},
{
"epoch": 1.9566490919742239,
"grad_norm": 1.0327099561691284,
"learning_rate": 0.00033856268715011067,
"loss": 0.1927,
"step": 3340
},
{
"epoch": 1.962507322788518,
"grad_norm": 0.6372695565223694,
"learning_rate": 0.0003379117302434579,
"loss": 0.1247,
"step": 3350
},
{
"epoch": 1.968365553602812,
"grad_norm": 0.30220475792884827,
"learning_rate": 0.0003372607733368051,
"loss": 0.142,
"step": 3360
},
{
"epoch": 1.974223784417106,
"grad_norm": 0.4559396207332611,
"learning_rate": 0.0003366098164301523,
"loss": 0.1475,
"step": 3370
},
{
"epoch": 1.9800820152314,
"grad_norm": 0.6560627222061157,
"learning_rate": 0.00033595885952349955,
"loss": 0.1602,
"step": 3380
},
{
"epoch": 1.9859402460456943,
"grad_norm": 1.2091546058654785,
"learning_rate": 0.0003353079026168468,
"loss": 0.1333,
"step": 3390
},
{
"epoch": 1.9917984768599881,
"grad_norm": 0.6147709488868713,
"learning_rate": 0.000334656945710194,
"loss": 0.1473,
"step": 3400
},
{
"epoch": 1.9976567076742824,
"grad_norm": 0.751679539680481,
"learning_rate": 0.00033400598880354123,
"loss": 0.1326,
"step": 3410
},
{
"epoch": 2.0,
"eval_f1": 0.9051568698160837,
"eval_loss": 0.2498554289340973,
"eval_runtime": 14.577,
"eval_samples_per_second": 205.804,
"eval_steps_per_second": 6.449,
"step": 3414
},
{
"epoch": 2.0035149384885766,
"grad_norm": 0.8273231387138367,
"learning_rate": 0.0003333550318968884,
"loss": 0.1969,
"step": 3420
},
{
"epoch": 2.0093731693028705,
"grad_norm": 0.7016593217849731,
"learning_rate": 0.00033270407499023566,
"loss": 0.1126,
"step": 3430
},
{
"epoch": 2.0152314001171647,
"grad_norm": 0.6760423183441162,
"learning_rate": 0.00033205311808358286,
"loss": 0.1554,
"step": 3440
},
{
"epoch": 2.0210896309314585,
"grad_norm": 0.7149805426597595,
"learning_rate": 0.0003314021611769301,
"loss": 0.1065,
"step": 3450
},
{
"epoch": 2.026947861745753,
"grad_norm": 1.2349637746810913,
"learning_rate": 0.00033075120427027735,
"loss": 0.1916,
"step": 3460
},
{
"epoch": 2.032806092560047,
"grad_norm": 0.8604615330696106,
"learning_rate": 0.00033010024736362454,
"loss": 0.1068,
"step": 3470
},
{
"epoch": 2.038664323374341,
"grad_norm": 0.24382354319095612,
"learning_rate": 0.00032944929045697173,
"loss": 0.1375,
"step": 3480
},
{
"epoch": 2.044522554188635,
"grad_norm": 0.656825602054596,
"learning_rate": 0.000328798333550319,
"loss": 0.1482,
"step": 3490
},
{
"epoch": 2.050380785002929,
"grad_norm": 0.9618995785713196,
"learning_rate": 0.00032814737664366617,
"loss": 0.1149,
"step": 3500
},
{
"epoch": 2.0562390158172232,
"grad_norm": 1.9244462251663208,
"learning_rate": 0.00032749641973701347,
"loss": 0.1389,
"step": 3510
},
{
"epoch": 2.0620972466315175,
"grad_norm": 1.6664360761642456,
"learning_rate": 0.00032684546283036066,
"loss": 0.1432,
"step": 3520
},
{
"epoch": 2.0679554774458113,
"grad_norm": 0.4561314284801483,
"learning_rate": 0.00032619450592370785,
"loss": 0.1132,
"step": 3530
},
{
"epoch": 2.0738137082601056,
"grad_norm": 0.33170029520988464,
"learning_rate": 0.0003255435490170551,
"loss": 0.1682,
"step": 3540
},
{
"epoch": 2.0796719390743994,
"grad_norm": 0.4367826282978058,
"learning_rate": 0.0003248925921104023,
"loss": 0.0934,
"step": 3550
},
{
"epoch": 2.0855301698886937,
"grad_norm": 0.7262292504310608,
"learning_rate": 0.0003242416352037495,
"loss": 0.1238,
"step": 3560
},
{
"epoch": 2.0913884007029875,
"grad_norm": 1.0613982677459717,
"learning_rate": 0.0003235906782970968,
"loss": 0.0882,
"step": 3570
},
{
"epoch": 2.0972466315172817,
"grad_norm": 2.260653018951416,
"learning_rate": 0.00032293972139044397,
"loss": 0.2016,
"step": 3580
},
{
"epoch": 2.103104862331576,
"grad_norm": 1.0498729944229126,
"learning_rate": 0.00032228876448379116,
"loss": 0.1245,
"step": 3590
},
{
"epoch": 2.10896309314587,
"grad_norm": 1.473840594291687,
"learning_rate": 0.0003216378075771384,
"loss": 0.1602,
"step": 3600
},
{
"epoch": 2.114821323960164,
"grad_norm": 0.40939509868621826,
"learning_rate": 0.0003209868506704856,
"loss": 0.1458,
"step": 3610
},
{
"epoch": 2.120679554774458,
"grad_norm": 0.44588029384613037,
"learning_rate": 0.00032033589376383284,
"loss": 0.1174,
"step": 3620
},
{
"epoch": 2.126537785588752,
"grad_norm": 1.196237325668335,
"learning_rate": 0.0003196849368571801,
"loss": 0.1936,
"step": 3630
},
{
"epoch": 2.1323960164030464,
"grad_norm": 0.3714032769203186,
"learning_rate": 0.0003190339799505273,
"loss": 0.1558,
"step": 3640
},
{
"epoch": 2.1382542472173403,
"grad_norm": 0.8900133967399597,
"learning_rate": 0.0003183830230438745,
"loss": 0.1434,
"step": 3650
},
{
"epoch": 2.1441124780316345,
"grad_norm": 0.21238140761852264,
"learning_rate": 0.0003177320661372217,
"loss": 0.1501,
"step": 3660
},
{
"epoch": 2.1499707088459283,
"grad_norm": 0.45354288816452026,
"learning_rate": 0.0003170811092305689,
"loss": 0.1279,
"step": 3670
},
{
"epoch": 2.1558289396602226,
"grad_norm": 1.1276099681854248,
"learning_rate": 0.00031643015232391615,
"loss": 0.1237,
"step": 3680
},
{
"epoch": 2.161687170474517,
"grad_norm": 1.6248077154159546,
"learning_rate": 0.0003157791954172634,
"loss": 0.0997,
"step": 3690
},
{
"epoch": 2.1675454012888107,
"grad_norm": 0.5289869904518127,
"learning_rate": 0.00031512823851061064,
"loss": 0.1384,
"step": 3700
},
{
"epoch": 2.173403632103105,
"grad_norm": 0.781470000743866,
"learning_rate": 0.00031447728160395784,
"loss": 0.107,
"step": 3710
},
{
"epoch": 2.1792618629173988,
"grad_norm": 0.49066823720932007,
"learning_rate": 0.000313826324697305,
"loss": 0.1406,
"step": 3720
},
{
"epoch": 2.185120093731693,
"grad_norm": 0.9173513054847717,
"learning_rate": 0.00031317536779065227,
"loss": 0.1404,
"step": 3730
},
{
"epoch": 2.1909783245459873,
"grad_norm": 0.29051652550697327,
"learning_rate": 0.00031252441088399946,
"loss": 0.1694,
"step": 3740
},
{
"epoch": 2.196836555360281,
"grad_norm": 0.5923140645027161,
"learning_rate": 0.0003118734539773467,
"loss": 0.1195,
"step": 3750
},
{
"epoch": 2.2026947861745754,
"grad_norm": 0.6858001947402954,
"learning_rate": 0.00031122249707069395,
"loss": 0.18,
"step": 3760
},
{
"epoch": 2.208553016988869,
"grad_norm": 0.9082098603248596,
"learning_rate": 0.00031057154016404115,
"loss": 0.1295,
"step": 3770
},
{
"epoch": 2.2144112478031635,
"grad_norm": 0.8940849304199219,
"learning_rate": 0.0003099205832573884,
"loss": 0.163,
"step": 3780
},
{
"epoch": 2.2202694786174577,
"grad_norm": 0.36999017000198364,
"learning_rate": 0.0003092696263507356,
"loss": 0.1384,
"step": 3790
},
{
"epoch": 2.2261277094317515,
"grad_norm": 1.1672558784484863,
"learning_rate": 0.0003086186694440828,
"loss": 0.138,
"step": 3800
},
{
"epoch": 2.231985940246046,
"grad_norm": 0.8477211594581604,
"learning_rate": 0.0003079677125374301,
"loss": 0.1636,
"step": 3810
},
{
"epoch": 2.2378441710603396,
"grad_norm": 1.2284730672836304,
"learning_rate": 0.00030731675563077727,
"loss": 0.1117,
"step": 3820
},
{
"epoch": 2.243702401874634,
"grad_norm": 0.745220959186554,
"learning_rate": 0.00030666579872412446,
"loss": 0.1254,
"step": 3830
},
{
"epoch": 2.249560632688928,
"grad_norm": 1.0539661645889282,
"learning_rate": 0.0003060148418174717,
"loss": 0.1172,
"step": 3840
},
{
"epoch": 2.255418863503222,
"grad_norm": 0.3214583992958069,
"learning_rate": 0.0003053638849108189,
"loss": 0.1075,
"step": 3850
},
{
"epoch": 2.2612770943175162,
"grad_norm": 0.11182527989149094,
"learning_rate": 0.00030471292800416614,
"loss": 0.1132,
"step": 3860
},
{
"epoch": 2.26713532513181,
"grad_norm": 1.5970757007598877,
"learning_rate": 0.00030406197109751333,
"loss": 0.1469,
"step": 3870
},
{
"epoch": 2.2729935559461043,
"grad_norm": 1.57491135597229,
"learning_rate": 0.0003034110141908606,
"loss": 0.1708,
"step": 3880
},
{
"epoch": 2.2788517867603986,
"grad_norm": 0.7461884617805481,
"learning_rate": 0.0003027600572842078,
"loss": 0.1209,
"step": 3890
},
{
"epoch": 2.2847100175746924,
"grad_norm": 1.06315016746521,
"learning_rate": 0.000302109100377555,
"loss": 0.1731,
"step": 3900
},
{
"epoch": 2.2905682483889866,
"grad_norm": 1.3551595211029053,
"learning_rate": 0.0003014581434709022,
"loss": 0.1369,
"step": 3910
},
{
"epoch": 2.2964264792032805,
"grad_norm": 0.7530913352966309,
"learning_rate": 0.00030080718656424945,
"loss": 0.1821,
"step": 3920
},
{
"epoch": 2.3022847100175747,
"grad_norm": 0.5707684755325317,
"learning_rate": 0.00030015622965759664,
"loss": 0.1104,
"step": 3930
},
{
"epoch": 2.3081429408318686,
"grad_norm": 0.8748314380645752,
"learning_rate": 0.0002995052727509439,
"loss": 0.0961,
"step": 3940
},
{
"epoch": 2.314001171646163,
"grad_norm": 1.2551450729370117,
"learning_rate": 0.00029885431584429113,
"loss": 0.1167,
"step": 3950
},
{
"epoch": 2.319859402460457,
"grad_norm": 0.5332285761833191,
"learning_rate": 0.0002982033589376383,
"loss": 0.0624,
"step": 3960
},
{
"epoch": 2.325717633274751,
"grad_norm": 1.4492905139923096,
"learning_rate": 0.00029755240203098557,
"loss": 0.1021,
"step": 3970
},
{
"epoch": 2.331575864089045,
"grad_norm": 0.294634610414505,
"learning_rate": 0.00029690144512433276,
"loss": 0.118,
"step": 3980
},
{
"epoch": 2.3374340949033394,
"grad_norm": 0.9232218861579895,
"learning_rate": 0.00029625048821767995,
"loss": 0.1014,
"step": 3990
},
{
"epoch": 2.3432923257176332,
"grad_norm": 0.928164005279541,
"learning_rate": 0.00029559953131102725,
"loss": 0.0991,
"step": 4000
},
{
"epoch": 2.3491505565319275,
"grad_norm": 0.6724839806556702,
"learning_rate": 0.00029494857440437444,
"loss": 0.1867,
"step": 4010
},
{
"epoch": 2.3550087873462213,
"grad_norm": 1.1483349800109863,
"learning_rate": 0.00029429761749772163,
"loss": 0.1504,
"step": 4020
},
{
"epoch": 2.3608670181605156,
"grad_norm": 0.6115332841873169,
"learning_rate": 0.0002936466605910689,
"loss": 0.0969,
"step": 4030
},
{
"epoch": 2.3667252489748094,
"grad_norm": 0.43446749448776245,
"learning_rate": 0.00029299570368441607,
"loss": 0.1511,
"step": 4040
},
{
"epoch": 2.3725834797891037,
"grad_norm": 1.4291220903396606,
"learning_rate": 0.0002923447467777633,
"loss": 0.136,
"step": 4050
},
{
"epoch": 2.378441710603398,
"grad_norm": 0.640063464641571,
"learning_rate": 0.00029169378987111056,
"loss": 0.104,
"step": 4060
},
{
"epoch": 2.3842999414176917,
"grad_norm": 0.8535376191139221,
"learning_rate": 0.00029104283296445775,
"loss": 0.1197,
"step": 4070
},
{
"epoch": 2.390158172231986,
"grad_norm": 1.2727094888687134,
"learning_rate": 0.000290391876057805,
"loss": 0.1475,
"step": 4080
},
{
"epoch": 2.39601640304628,
"grad_norm": 1.8613280057907104,
"learning_rate": 0.0002897409191511522,
"loss": 0.121,
"step": 4090
},
{
"epoch": 2.401874633860574,
"grad_norm": 1.230644702911377,
"learning_rate": 0.0002890899622444994,
"loss": 0.0983,
"step": 4100
},
{
"epoch": 2.4077328646748684,
"grad_norm": 0.5424458980560303,
"learning_rate": 0.00028843900533784663,
"loss": 0.1382,
"step": 4110
},
{
"epoch": 2.413591095489162,
"grad_norm": 0.5892564058303833,
"learning_rate": 0.0002877880484311939,
"loss": 0.112,
"step": 4120
},
{
"epoch": 2.4194493263034564,
"grad_norm": 0.936575710773468,
"learning_rate": 0.0002871370915245411,
"loss": 0.1243,
"step": 4130
},
{
"epoch": 2.4253075571177503,
"grad_norm": 1.3329449892044067,
"learning_rate": 0.0002864861346178883,
"loss": 0.1001,
"step": 4140
},
{
"epoch": 2.4311657879320445,
"grad_norm": 0.28441017866134644,
"learning_rate": 0.0002858351777112355,
"loss": 0.1086,
"step": 4150
},
{
"epoch": 2.437024018746339,
"grad_norm": 0.7536140084266663,
"learning_rate": 0.00028518422080458275,
"loss": 0.1712,
"step": 4160
},
{
"epoch": 2.4428822495606326,
"grad_norm": 0.4054887592792511,
"learning_rate": 0.00028453326389792994,
"loss": 0.091,
"step": 4170
},
{
"epoch": 2.448740480374927,
"grad_norm": 0.4561721384525299,
"learning_rate": 0.0002838823069912772,
"loss": 0.1377,
"step": 4180
},
{
"epoch": 2.4545987111892207,
"grad_norm": 0.9957545399665833,
"learning_rate": 0.00028323135008462443,
"loss": 0.1018,
"step": 4190
},
{
"epoch": 2.460456942003515,
"grad_norm": 1.5235391855239868,
"learning_rate": 0.0002825803931779716,
"loss": 0.1161,
"step": 4200
},
{
"epoch": 2.466315172817809,
"grad_norm": 1.5290122032165527,
"learning_rate": 0.00028192943627131887,
"loss": 0.1101,
"step": 4210
},
{
"epoch": 2.472173403632103,
"grad_norm": 0.8591200113296509,
"learning_rate": 0.00028127847936466606,
"loss": 0.1148,
"step": 4220
},
{
"epoch": 2.4780316344463973,
"grad_norm": 1.4811750650405884,
"learning_rate": 0.00028062752245801325,
"loss": 0.1753,
"step": 4230
},
{
"epoch": 2.483889865260691,
"grad_norm": 0.9379531145095825,
"learning_rate": 0.00027997656555136055,
"loss": 0.1451,
"step": 4240
},
{
"epoch": 2.4897480960749854,
"grad_norm": 1.2333440780639648,
"learning_rate": 0.00027932560864470774,
"loss": 0.1101,
"step": 4250
},
{
"epoch": 2.4956063268892796,
"grad_norm": 1.016764760017395,
"learning_rate": 0.00027867465173805493,
"loss": 0.124,
"step": 4260
},
{
"epoch": 2.5014645577035735,
"grad_norm": 1.6167750358581543,
"learning_rate": 0.0002780236948314022,
"loss": 0.1398,
"step": 4270
},
{
"epoch": 2.5073227885178677,
"grad_norm": 0.5222885012626648,
"learning_rate": 0.00027737273792474937,
"loss": 0.097,
"step": 4280
},
{
"epoch": 2.5131810193321615,
"grad_norm": 0.763202428817749,
"learning_rate": 0.00027672178101809656,
"loss": 0.1379,
"step": 4290
},
{
"epoch": 2.519039250146456,
"grad_norm": 0.9483877420425415,
"learning_rate": 0.00027607082411144386,
"loss": 0.1466,
"step": 4300
},
{
"epoch": 2.5248974809607496,
"grad_norm": 0.28170886635780334,
"learning_rate": 0.00027541986720479105,
"loss": 0.0893,
"step": 4310
},
{
"epoch": 2.530755711775044,
"grad_norm": 1.6378175020217896,
"learning_rate": 0.0002747689102981383,
"loss": 0.1025,
"step": 4320
},
{
"epoch": 2.536613942589338,
"grad_norm": 0.8733501434326172,
"learning_rate": 0.0002741179533914855,
"loss": 0.1538,
"step": 4330
},
{
"epoch": 2.542472173403632,
"grad_norm": 0.4998052716255188,
"learning_rate": 0.0002734669964848327,
"loss": 0.12,
"step": 4340
},
{
"epoch": 2.5483304042179262,
"grad_norm": 2.2542619705200195,
"learning_rate": 0.0002728160395781799,
"loss": 0.1392,
"step": 4350
},
{
"epoch": 2.5541886350322205,
"grad_norm": 0.7784902453422546,
"learning_rate": 0.00027216508267152717,
"loss": 0.1211,
"step": 4360
},
{
"epoch": 2.5600468658465143,
"grad_norm": 1.080384612083435,
"learning_rate": 0.00027151412576487436,
"loss": 0.1912,
"step": 4370
},
{
"epoch": 2.5659050966608086,
"grad_norm": 0.5921308398246765,
"learning_rate": 0.0002708631688582216,
"loss": 0.111,
"step": 4380
},
{
"epoch": 2.5717633274751024,
"grad_norm": 1.543337345123291,
"learning_rate": 0.0002702122119515688,
"loss": 0.1058,
"step": 4390
},
{
"epoch": 2.5776215582893967,
"grad_norm": 0.8708186149597168,
"learning_rate": 0.00026956125504491604,
"loss": 0.1268,
"step": 4400
},
{
"epoch": 2.5834797891036905,
"grad_norm": 1.394648790359497,
"learning_rate": 0.00026891029813826323,
"loss": 0.144,
"step": 4410
},
{
"epoch": 2.5893380199179847,
"grad_norm": 0.597211480140686,
"learning_rate": 0.0002682593412316105,
"loss": 0.0984,
"step": 4420
},
{
"epoch": 2.595196250732279,
"grad_norm": 0.30916476249694824,
"learning_rate": 0.0002676083843249577,
"loss": 0.1638,
"step": 4430
},
{
"epoch": 2.601054481546573,
"grad_norm": 0.4281676113605499,
"learning_rate": 0.0002669574274183049,
"loss": 0.1351,
"step": 4440
},
{
"epoch": 2.606912712360867,
"grad_norm": 0.8316947221755981,
"learning_rate": 0.0002663064705116521,
"loss": 0.1458,
"step": 4450
},
{
"epoch": 2.6127709431751613,
"grad_norm": 0.8049978017807007,
"learning_rate": 0.00026565551360499935,
"loss": 0.1022,
"step": 4460
},
{
"epoch": 2.618629173989455,
"grad_norm": 0.4352714419364929,
"learning_rate": 0.00026500455669834655,
"loss": 0.1185,
"step": 4470
},
{
"epoch": 2.6244874048037494,
"grad_norm": 1.2094974517822266,
"learning_rate": 0.00026435359979169385,
"loss": 0.1452,
"step": 4480
},
{
"epoch": 2.6303456356180432,
"grad_norm": 0.7460260987281799,
"learning_rate": 0.00026370264288504104,
"loss": 0.1086,
"step": 4490
},
{
"epoch": 2.6362038664323375,
"grad_norm": 1.3077291250228882,
"learning_rate": 0.00026305168597838823,
"loss": 0.1692,
"step": 4500
},
{
"epoch": 2.6420620972466313,
"grad_norm": 0.5297181010246277,
"learning_rate": 0.0002624007290717355,
"loss": 0.1575,
"step": 4510
},
{
"epoch": 2.6479203280609256,
"grad_norm": 0.9857646226882935,
"learning_rate": 0.00026174977216508266,
"loss": 0.1359,
"step": 4520
},
{
"epoch": 2.65377855887522,
"grad_norm": 0.3707817494869232,
"learning_rate": 0.00026109881525842986,
"loss": 0.1226,
"step": 4530
},
{
"epoch": 2.6596367896895137,
"grad_norm": 1.480391263961792,
"learning_rate": 0.00026044785835177716,
"loss": 0.1046,
"step": 4540
},
{
"epoch": 2.665495020503808,
"grad_norm": 0.7787607908248901,
"learning_rate": 0.00025979690144512435,
"loss": 0.1064,
"step": 4550
},
{
"epoch": 2.671353251318102,
"grad_norm": 0.7129953503608704,
"learning_rate": 0.0002591459445384716,
"loss": 0.0566,
"step": 4560
},
{
"epoch": 2.677211482132396,
"grad_norm": 0.8584160804748535,
"learning_rate": 0.0002584949876318188,
"loss": 0.1639,
"step": 4570
},
{
"epoch": 2.68306971294669,
"grad_norm": 1.6028215885162354,
"learning_rate": 0.000257844030725166,
"loss": 0.1539,
"step": 4580
},
{
"epoch": 2.688927943760984,
"grad_norm": 0.49101898074150085,
"learning_rate": 0.0002571930738185132,
"loss": 0.105,
"step": 4590
},
{
"epoch": 2.6947861745752784,
"grad_norm": 2.8125131130218506,
"learning_rate": 0.0002565421169118604,
"loss": 0.1414,
"step": 4600
},
{
"epoch": 2.700644405389572,
"grad_norm": 0.6218743324279785,
"learning_rate": 0.00025589116000520766,
"loss": 0.123,
"step": 4610
},
{
"epoch": 2.7065026362038664,
"grad_norm": 1.4385967254638672,
"learning_rate": 0.0002552402030985549,
"loss": 0.142,
"step": 4620
},
{
"epoch": 2.7123608670181607,
"grad_norm": 0.36521002650260925,
"learning_rate": 0.0002545892461919021,
"loss": 0.0797,
"step": 4630
},
{
"epoch": 2.7182190978324545,
"grad_norm": 1.6112006902694702,
"learning_rate": 0.0002539382892852493,
"loss": 0.1632,
"step": 4640
},
{
"epoch": 2.724077328646749,
"grad_norm": 0.47123655676841736,
"learning_rate": 0.00025328733237859653,
"loss": 0.1085,
"step": 4650
},
{
"epoch": 2.7299355594610426,
"grad_norm": 0.8812481164932251,
"learning_rate": 0.0002526363754719437,
"loss": 0.1075,
"step": 4660
},
{
"epoch": 2.735793790275337,
"grad_norm": 1.782219648361206,
"learning_rate": 0.000251985418565291,
"loss": 0.1371,
"step": 4670
},
{
"epoch": 2.7416520210896307,
"grad_norm": 1.105644941329956,
"learning_rate": 0.0002513344616586382,
"loss": 0.1572,
"step": 4680
},
{
"epoch": 2.747510251903925,
"grad_norm": 1.425328016281128,
"learning_rate": 0.0002506835047519854,
"loss": 0.131,
"step": 4690
},
{
"epoch": 2.753368482718219,
"grad_norm": 0.43381068110466003,
"learning_rate": 0.00025003254784533265,
"loss": 0.1552,
"step": 4700
},
{
"epoch": 2.759226713532513,
"grad_norm": 2.056269645690918,
"learning_rate": 0.0002493815909386799,
"loss": 0.1155,
"step": 4710
},
{
"epoch": 2.7650849443468073,
"grad_norm": 1.0049457550048828,
"learning_rate": 0.0002487306340320271,
"loss": 0.1399,
"step": 4720
},
{
"epoch": 2.7709431751611016,
"grad_norm": 2.309018135070801,
"learning_rate": 0.0002480796771253743,
"loss": 0.1408,
"step": 4730
},
{
"epoch": 2.7768014059753954,
"grad_norm": 1.171257734298706,
"learning_rate": 0.0002474287202187215,
"loss": 0.1368,
"step": 4740
},
{
"epoch": 2.7826596367896896,
"grad_norm": 1.2910953760147095,
"learning_rate": 0.00024677776331206877,
"loss": 0.1101,
"step": 4750
},
{
"epoch": 2.7885178676039835,
"grad_norm": 0.9543918967247009,
"learning_rate": 0.00024612680640541596,
"loss": 0.1791,
"step": 4760
},
{
"epoch": 2.7943760984182777,
"grad_norm": 0.6492213010787964,
"learning_rate": 0.0002454758494987632,
"loss": 0.1034,
"step": 4770
},
{
"epoch": 2.8002343292325715,
"grad_norm": 1.3879276514053345,
"learning_rate": 0.0002448248925921104,
"loss": 0.1126,
"step": 4780
},
{
"epoch": 2.806092560046866,
"grad_norm": 0.843720555305481,
"learning_rate": 0.00024417393568545764,
"loss": 0.097,
"step": 4790
},
{
"epoch": 2.81195079086116,
"grad_norm": 0.4348345994949341,
"learning_rate": 0.00024352297877880486,
"loss": 0.1093,
"step": 4800
},
{
"epoch": 2.817809021675454,
"grad_norm": 0.7387756705284119,
"learning_rate": 0.00024287202187215208,
"loss": 0.1409,
"step": 4810
},
{
"epoch": 2.823667252489748,
"grad_norm": 1.1606628894805908,
"learning_rate": 0.00024222106496549927,
"loss": 0.1175,
"step": 4820
},
{
"epoch": 2.8295254833040424,
"grad_norm": 0.33284834027290344,
"learning_rate": 0.00024157010805884652,
"loss": 0.1256,
"step": 4830
},
{
"epoch": 2.8353837141183362,
"grad_norm": 1.8327375650405884,
"learning_rate": 0.00024091915115219374,
"loss": 0.1376,
"step": 4840
},
{
"epoch": 2.8412419449326305,
"grad_norm": 0.3788931667804718,
"learning_rate": 0.00024026819424554095,
"loss": 0.1204,
"step": 4850
},
{
"epoch": 2.8471001757469243,
"grad_norm": 0.8661250472068787,
"learning_rate": 0.00023961723733888817,
"loss": 0.1064,
"step": 4860
},
{
"epoch": 2.8529584065612186,
"grad_norm": 0.6688357591629028,
"learning_rate": 0.0002389662804322354,
"loss": 0.1455,
"step": 4870
},
{
"epoch": 2.8588166373755124,
"grad_norm": 0.8732729554176331,
"learning_rate": 0.0002383153235255826,
"loss": 0.0888,
"step": 4880
},
{
"epoch": 2.8646748681898067,
"grad_norm": 0.4746171832084656,
"learning_rate": 0.00023766436661892986,
"loss": 0.1252,
"step": 4890
},
{
"epoch": 2.870533099004101,
"grad_norm": 0.9698277115821838,
"learning_rate": 0.00023701340971227705,
"loss": 0.1077,
"step": 4900
},
{
"epoch": 2.8763913298183947,
"grad_norm": 0.39930036664009094,
"learning_rate": 0.00023636245280562427,
"loss": 0.0795,
"step": 4910
},
{
"epoch": 2.882249560632689,
"grad_norm": 0.9420844912528992,
"learning_rate": 0.0002357114958989715,
"loss": 0.1143,
"step": 4920
},
{
"epoch": 2.8881077914469833,
"grad_norm": 0.8221577405929565,
"learning_rate": 0.00023506053899231873,
"loss": 0.1146,
"step": 4930
},
{
"epoch": 2.893966022261277,
"grad_norm": 0.9865840673446655,
"learning_rate": 0.00023440958208566592,
"loss": 0.1409,
"step": 4940
},
{
"epoch": 2.899824253075571,
"grad_norm": 0.7893118262290955,
"learning_rate": 0.00023375862517901317,
"loss": 0.1549,
"step": 4950
},
{
"epoch": 2.905682483889865,
"grad_norm": 1.0813521146774292,
"learning_rate": 0.00023310766827236038,
"loss": 0.1122,
"step": 4960
},
{
"epoch": 2.9115407147041594,
"grad_norm": 1.1200429201126099,
"learning_rate": 0.0002324567113657076,
"loss": 0.0944,
"step": 4970
},
{
"epoch": 2.9173989455184532,
"grad_norm": 1.6647378206253052,
"learning_rate": 0.0002318057544590548,
"loss": 0.1129,
"step": 4980
},
{
"epoch": 2.9232571763327475,
"grad_norm": 0.7454376220703125,
"learning_rate": 0.00023115479755240204,
"loss": 0.1238,
"step": 4990
},
{
"epoch": 2.9291154071470418,
"grad_norm": 0.5001448392868042,
"learning_rate": 0.00023050384064574926,
"loss": 0.0679,
"step": 5000
},
{
"epoch": 2.9349736379613356,
"grad_norm": 0.8018816709518433,
"learning_rate": 0.00022985288373909645,
"loss": 0.115,
"step": 5010
},
{
"epoch": 2.94083186877563,
"grad_norm": 0.793289840221405,
"learning_rate": 0.0002292019268324437,
"loss": 0.144,
"step": 5020
},
{
"epoch": 2.946690099589924,
"grad_norm": 1.4793994426727295,
"learning_rate": 0.00022855096992579091,
"loss": 0.126,
"step": 5030
},
{
"epoch": 2.952548330404218,
"grad_norm": 1.5347057580947876,
"learning_rate": 0.00022790001301913813,
"loss": 0.1249,
"step": 5040
},
{
"epoch": 2.9584065612185118,
"grad_norm": 0.9479228258132935,
"learning_rate": 0.00022724905611248535,
"loss": 0.1251,
"step": 5050
},
{
"epoch": 2.964264792032806,
"grad_norm": 0.7511537075042725,
"learning_rate": 0.00022659809920583257,
"loss": 0.1311,
"step": 5060
},
{
"epoch": 2.9701230228471003,
"grad_norm": 1.184127926826477,
"learning_rate": 0.0002259471422991798,
"loss": 0.1497,
"step": 5070
},
{
"epoch": 2.975981253661394,
"grad_norm": 0.5232158899307251,
"learning_rate": 0.00022529618539252703,
"loss": 0.1539,
"step": 5080
},
{
"epoch": 2.9818394844756884,
"grad_norm": 0.8987466096878052,
"learning_rate": 0.00022464522848587422,
"loss": 0.0837,
"step": 5090
},
{
"epoch": 2.9876977152899826,
"grad_norm": 0.8917015790939331,
"learning_rate": 0.00022399427157922144,
"loss": 0.1197,
"step": 5100
},
{
"epoch": 2.9935559461042764,
"grad_norm": 0.277315616607666,
"learning_rate": 0.0002233433146725687,
"loss": 0.1388,
"step": 5110
},
{
"epoch": 2.9994141769185707,
"grad_norm": 0.8018264770507812,
"learning_rate": 0.0002226923577659159,
"loss": 0.1113,
"step": 5120
},
{
"epoch": 3.0,
"eval_f1": 0.9308659217877095,
"eval_loss": 0.19312748312950134,
"eval_runtime": 14.7246,
"eval_samples_per_second": 203.741,
"eval_steps_per_second": 6.384,
"step": 5121
},
{
"epoch": 3.0052724077328645,
"grad_norm": 0.6647753715515137,
"learning_rate": 0.0002220414008592631,
"loss": 0.1585,
"step": 5130
},
{
"epoch": 3.011130638547159,
"grad_norm": 1.3105008602142334,
"learning_rate": 0.00022139044395261034,
"loss": 0.1092,
"step": 5140
},
{
"epoch": 3.016988869361453,
"grad_norm": 0.5599445700645447,
"learning_rate": 0.00022073948704595756,
"loss": 0.0809,
"step": 5150
},
{
"epoch": 3.022847100175747,
"grad_norm": 2.208543539047241,
"learning_rate": 0.00022008853013930478,
"loss": 0.1188,
"step": 5160
},
{
"epoch": 3.028705330990041,
"grad_norm": 1.3378957509994507,
"learning_rate": 0.000219437573232652,
"loss": 0.0943,
"step": 5170
},
{
"epoch": 3.034563561804335,
"grad_norm": 1.3608115911483765,
"learning_rate": 0.00021878661632599922,
"loss": 0.0886,
"step": 5180
},
{
"epoch": 3.040421792618629,
"grad_norm": 0.6845038533210754,
"learning_rate": 0.00021813565941934644,
"loss": 0.0881,
"step": 5190
},
{
"epoch": 3.0462800234329235,
"grad_norm": 1.1577521562576294,
"learning_rate": 0.00021748470251269368,
"loss": 0.1168,
"step": 5200
},
{
"epoch": 3.0521382542472173,
"grad_norm": 0.5686136484146118,
"learning_rate": 0.00021683374560604087,
"loss": 0.0874,
"step": 5210
},
{
"epoch": 3.0579964850615116,
"grad_norm": 1.3156239986419678,
"learning_rate": 0.0002161827886993881,
"loss": 0.1072,
"step": 5220
},
{
"epoch": 3.0638547158758054,
"grad_norm": 1.4853498935699463,
"learning_rate": 0.00021553183179273534,
"loss": 0.1296,
"step": 5230
},
{
"epoch": 3.0697129466900996,
"grad_norm": 0.6757405400276184,
"learning_rate": 0.00021488087488608256,
"loss": 0.0878,
"step": 5240
},
{
"epoch": 3.0755711775043935,
"grad_norm": 1.7888120412826538,
"learning_rate": 0.00021422991797942975,
"loss": 0.1324,
"step": 5250
},
{
"epoch": 3.0814294083186877,
"grad_norm": 0.30726903676986694,
"learning_rate": 0.000213578961072777,
"loss": 0.0949,
"step": 5260
},
{
"epoch": 3.087287639132982,
"grad_norm": 1.2144439220428467,
"learning_rate": 0.0002129280041661242,
"loss": 0.0923,
"step": 5270
},
{
"epoch": 3.093145869947276,
"grad_norm": 1.7017472982406616,
"learning_rate": 0.00021227704725947143,
"loss": 0.1178,
"step": 5280
},
{
"epoch": 3.09900410076157,
"grad_norm": 0.6928281784057617,
"learning_rate": 0.00021162609035281865,
"loss": 0.1124,
"step": 5290
},
{
"epoch": 3.104862331575864,
"grad_norm": 1.007668375968933,
"learning_rate": 0.00021097513344616587,
"loss": 0.1586,
"step": 5300
},
{
"epoch": 3.110720562390158,
"grad_norm": 1.729608178138733,
"learning_rate": 0.00021032417653951308,
"loss": 0.1273,
"step": 5310
},
{
"epoch": 3.1165787932044524,
"grad_norm": 0.3571639955043793,
"learning_rate": 0.00020967321963286033,
"loss": 0.1102,
"step": 5320
},
{
"epoch": 3.1224370240187462,
"grad_norm": 0.6538040637969971,
"learning_rate": 0.00020902226272620752,
"loss": 0.0988,
"step": 5330
},
{
"epoch": 3.1282952548330405,
"grad_norm": 1.0057750940322876,
"learning_rate": 0.00020837130581955474,
"loss": 0.0928,
"step": 5340
},
{
"epoch": 3.1341534856473343,
"grad_norm": 0.6291601061820984,
"learning_rate": 0.00020772034891290199,
"loss": 0.0826,
"step": 5350
},
{
"epoch": 3.1400117164616286,
"grad_norm": 1.195389986038208,
"learning_rate": 0.0002070693920062492,
"loss": 0.127,
"step": 5360
},
{
"epoch": 3.145869947275923,
"grad_norm": 1.0781468152999878,
"learning_rate": 0.0002064184350995964,
"loss": 0.1017,
"step": 5370
},
{
"epoch": 3.1517281780902167,
"grad_norm": 0.3608550727367401,
"learning_rate": 0.00020576747819294364,
"loss": 0.1006,
"step": 5380
},
{
"epoch": 3.157586408904511,
"grad_norm": 0.2512178122997284,
"learning_rate": 0.00020511652128629086,
"loss": 0.0945,
"step": 5390
},
{
"epoch": 3.1634446397188047,
"grad_norm": 1.0521883964538574,
"learning_rate": 0.00020446556437963805,
"loss": 0.0758,
"step": 5400
},
{
"epoch": 3.169302870533099,
"grad_norm": 0.8204757571220398,
"learning_rate": 0.0002038146074729853,
"loss": 0.1369,
"step": 5410
},
{
"epoch": 3.1751611013473933,
"grad_norm": 0.2992514669895172,
"learning_rate": 0.00020316365056633251,
"loss": 0.0798,
"step": 5420
},
{
"epoch": 3.181019332161687,
"grad_norm": 2.212993621826172,
"learning_rate": 0.00020251269365967973,
"loss": 0.0935,
"step": 5430
},
{
"epoch": 3.1868775629759813,
"grad_norm": 0.2897680401802063,
"learning_rate": 0.00020186173675302695,
"loss": 0.0947,
"step": 5440
},
{
"epoch": 3.192735793790275,
"grad_norm": 2.02703857421875,
"learning_rate": 0.00020121077984637417,
"loss": 0.1168,
"step": 5450
},
{
"epoch": 3.1985940246045694,
"grad_norm": 0.859622597694397,
"learning_rate": 0.0002005598229397214,
"loss": 0.131,
"step": 5460
},
{
"epoch": 3.2044522554188637,
"grad_norm": 1.419415831565857,
"learning_rate": 0.00019990886603306863,
"loss": 0.0949,
"step": 5470
},
{
"epoch": 3.2103104862331575,
"grad_norm": 0.9570263028144836,
"learning_rate": 0.00019925790912641583,
"loss": 0.0848,
"step": 5480
},
{
"epoch": 3.2161687170474518,
"grad_norm": 0.33368825912475586,
"learning_rate": 0.00019860695221976304,
"loss": 0.1125,
"step": 5490
},
{
"epoch": 3.2220269478617456,
"grad_norm": 0.3054542541503906,
"learning_rate": 0.0001979559953131103,
"loss": 0.0947,
"step": 5500
},
{
"epoch": 3.22788517867604,
"grad_norm": 0.5232771635055542,
"learning_rate": 0.0001973050384064575,
"loss": 0.1145,
"step": 5510
},
{
"epoch": 3.233743409490334,
"grad_norm": 1.5356754064559937,
"learning_rate": 0.0001966540814998047,
"loss": 0.0928,
"step": 5520
},
{
"epoch": 3.239601640304628,
"grad_norm": 1.7630113363265991,
"learning_rate": 0.00019600312459315194,
"loss": 0.0976,
"step": 5530
},
{
"epoch": 3.245459871118922,
"grad_norm": 0.34040358662605286,
"learning_rate": 0.00019535216768649916,
"loss": 0.0716,
"step": 5540
},
{
"epoch": 3.251318101933216,
"grad_norm": 0.37931734323501587,
"learning_rate": 0.00019470121077984638,
"loss": 0.127,
"step": 5550
},
{
"epoch": 3.2571763327475103,
"grad_norm": 1.5484894514083862,
"learning_rate": 0.0001940502538731936,
"loss": 0.1451,
"step": 5560
},
{
"epoch": 3.2630345635618045,
"grad_norm": 1.216162919998169,
"learning_rate": 0.00019339929696654082,
"loss": 0.0996,
"step": 5570
},
{
"epoch": 3.2688927943760984,
"grad_norm": 0.9533673524856567,
"learning_rate": 0.00019274834005988804,
"loss": 0.1204,
"step": 5580
},
{
"epoch": 3.2747510251903926,
"grad_norm": 0.6540274024009705,
"learning_rate": 0.00019209738315323528,
"loss": 0.1175,
"step": 5590
},
{
"epoch": 3.2806092560046864,
"grad_norm": 0.3820449113845825,
"learning_rate": 0.00019144642624658247,
"loss": 0.1616,
"step": 5600
},
{
"epoch": 3.2864674868189807,
"grad_norm": 0.7934489846229553,
"learning_rate": 0.0001907954693399297,
"loss": 0.1217,
"step": 5610
},
{
"epoch": 3.2923257176332745,
"grad_norm": 0.5252192616462708,
"learning_rate": 0.00019014451243327694,
"loss": 0.1411,
"step": 5620
},
{
"epoch": 3.298183948447569,
"grad_norm": 0.40958625078201294,
"learning_rate": 0.00018949355552662416,
"loss": 0.1228,
"step": 5630
},
{
"epoch": 3.304042179261863,
"grad_norm": 0.5728265047073364,
"learning_rate": 0.00018884259861997135,
"loss": 0.0719,
"step": 5640
},
{
"epoch": 3.309900410076157,
"grad_norm": 0.24659055471420288,
"learning_rate": 0.0001881916417133186,
"loss": 0.1438,
"step": 5650
},
{
"epoch": 3.315758640890451,
"grad_norm": 0.8120065927505493,
"learning_rate": 0.0001875406848066658,
"loss": 0.163,
"step": 5660
},
{
"epoch": 3.3216168717047454,
"grad_norm": 0.25870636105537415,
"learning_rate": 0.00018688972790001303,
"loss": 0.0843,
"step": 5670
},
{
"epoch": 3.327475102519039,
"grad_norm": 1.5063252449035645,
"learning_rate": 0.00018623877099336025,
"loss": 0.1041,
"step": 5680
},
{
"epoch": 3.3333333333333335,
"grad_norm": 0.8163599371910095,
"learning_rate": 0.00018558781408670747,
"loss": 0.1018,
"step": 5690
},
{
"epoch": 3.3391915641476273,
"grad_norm": 1.1529045104980469,
"learning_rate": 0.00018493685718005468,
"loss": 0.1135,
"step": 5700
},
{
"epoch": 3.3450497949619216,
"grad_norm": 0.39233940839767456,
"learning_rate": 0.0001842859002734019,
"loss": 0.096,
"step": 5710
},
{
"epoch": 3.3509080257762154,
"grad_norm": 0.9688923358917236,
"learning_rate": 0.00018363494336674912,
"loss": 0.1441,
"step": 5720
},
{
"epoch": 3.3567662565905096,
"grad_norm": 1.1118249893188477,
"learning_rate": 0.00018298398646009634,
"loss": 0.1334,
"step": 5730
},
{
"epoch": 3.362624487404804,
"grad_norm": 0.6611572504043579,
"learning_rate": 0.00018233302955344356,
"loss": 0.0873,
"step": 5740
},
{
"epoch": 3.3684827182190977,
"grad_norm": 1.4174485206604004,
"learning_rate": 0.0001816820726467908,
"loss": 0.138,
"step": 5750
},
{
"epoch": 3.374340949033392,
"grad_norm": 0.5151809453964233,
"learning_rate": 0.000181031115740138,
"loss": 0.121,
"step": 5760
},
{
"epoch": 3.380199179847686,
"grad_norm": 0.7787996530532837,
"learning_rate": 0.00018038015883348521,
"loss": 0.0834,
"step": 5770
},
{
"epoch": 3.38605741066198,
"grad_norm": 0.3302878737449646,
"learning_rate": 0.00017972920192683246,
"loss": 0.107,
"step": 5780
},
{
"epoch": 3.3919156414762743,
"grad_norm": 0.5792067646980286,
"learning_rate": 0.00017907824502017965,
"loss": 0.1134,
"step": 5790
},
{
"epoch": 3.397773872290568,
"grad_norm": 0.9854441285133362,
"learning_rate": 0.00017842728811352687,
"loss": 0.1388,
"step": 5800
},
{
"epoch": 3.4036321031048624,
"grad_norm": 0.3148748278617859,
"learning_rate": 0.00017777633120687411,
"loss": 0.1036,
"step": 5810
},
{
"epoch": 3.4094903339191562,
"grad_norm": 0.8012061715126038,
"learning_rate": 0.00017712537430022133,
"loss": 0.0934,
"step": 5820
},
{
"epoch": 3.4153485647334505,
"grad_norm": 1.5753650665283203,
"learning_rate": 0.00017647441739356852,
"loss": 0.087,
"step": 5830
},
{
"epoch": 3.4212067955477448,
"grad_norm": 0.9657014608383179,
"learning_rate": 0.00017582346048691577,
"loss": 0.1111,
"step": 5840
},
{
"epoch": 3.4270650263620386,
"grad_norm": 2.233928918838501,
"learning_rate": 0.000175172503580263,
"loss": 0.1123,
"step": 5850
},
{
"epoch": 3.432923257176333,
"grad_norm": 0.3911862373352051,
"learning_rate": 0.0001745215466736102,
"loss": 0.0758,
"step": 5860
},
{
"epoch": 3.4387814879906267,
"grad_norm": 0.20068423449993134,
"learning_rate": 0.00017387058976695743,
"loss": 0.1157,
"step": 5870
},
{
"epoch": 3.444639718804921,
"grad_norm": 1.0492706298828125,
"learning_rate": 0.00017321963286030464,
"loss": 0.0744,
"step": 5880
},
{
"epoch": 3.450497949619215,
"grad_norm": 0.5000963807106018,
"learning_rate": 0.00017256867595365186,
"loss": 0.1346,
"step": 5890
},
{
"epoch": 3.456356180433509,
"grad_norm": 0.9173428416252136,
"learning_rate": 0.0001719177190469991,
"loss": 0.0918,
"step": 5900
},
{
"epoch": 3.4622144112478033,
"grad_norm": 0.624330461025238,
"learning_rate": 0.0001712667621403463,
"loss": 0.1151,
"step": 5910
},
{
"epoch": 3.468072642062097,
"grad_norm": 0.414789080619812,
"learning_rate": 0.00017061580523369352,
"loss": 0.0942,
"step": 5920
},
{
"epoch": 3.4739308728763914,
"grad_norm": 1.336959719657898,
"learning_rate": 0.00016996484832704076,
"loss": 0.1209,
"step": 5930
},
{
"epoch": 3.4797891036906856,
"grad_norm": 0.5462775826454163,
"learning_rate": 0.00016931389142038798,
"loss": 0.0892,
"step": 5940
},
{
"epoch": 3.4856473345049794,
"grad_norm": 0.468053936958313,
"learning_rate": 0.00016866293451373517,
"loss": 0.1291,
"step": 5950
},
{
"epoch": 3.4915055653192737,
"grad_norm": 0.46274715662002563,
"learning_rate": 0.00016801197760708242,
"loss": 0.1401,
"step": 5960
},
{
"epoch": 3.4973637961335675,
"grad_norm": 0.5142794847488403,
"learning_rate": 0.00016736102070042964,
"loss": 0.1012,
"step": 5970
},
{
"epoch": 3.503222026947862,
"grad_norm": 0.14458006620407104,
"learning_rate": 0.00016671006379377686,
"loss": 0.0723,
"step": 5980
},
{
"epoch": 3.5090802577621556,
"grad_norm": 1.0561031103134155,
"learning_rate": 0.00016605910688712407,
"loss": 0.1175,
"step": 5990
},
{
"epoch": 3.51493848857645,
"grad_norm": 0.40694454312324524,
"learning_rate": 0.0001654081499804713,
"loss": 0.092,
"step": 6000
},
{
"epoch": 3.520796719390744,
"grad_norm": 0.6344606280326843,
"learning_rate": 0.0001647571930738185,
"loss": 0.0944,
"step": 6010
},
{
"epoch": 3.526654950205038,
"grad_norm": 1.8507148027420044,
"learning_rate": 0.00016410623616716576,
"loss": 0.1041,
"step": 6020
},
{
"epoch": 3.532513181019332,
"grad_norm": 0.6685912013053894,
"learning_rate": 0.00016345527926051295,
"loss": 0.132,
"step": 6030
},
{
"epoch": 3.5383714118336265,
"grad_norm": 1.3463377952575684,
"learning_rate": 0.00016280432235386017,
"loss": 0.1003,
"step": 6040
},
{
"epoch": 3.5442296426479203,
"grad_norm": 2.4183425903320312,
"learning_rate": 0.0001621533654472074,
"loss": 0.1197,
"step": 6050
},
{
"epoch": 3.5500878734622145,
"grad_norm": 0.753265917301178,
"learning_rate": 0.00016150240854055463,
"loss": 0.1071,
"step": 6060
},
{
"epoch": 3.5559461042765084,
"grad_norm": 0.4786781966686249,
"learning_rate": 0.00016085145163390182,
"loss": 0.1032,
"step": 6070
},
{
"epoch": 3.5618043350908026,
"grad_norm": 0.960931658744812,
"learning_rate": 0.00016020049472724907,
"loss": 0.0984,
"step": 6080
},
{
"epoch": 3.5676625659050965,
"grad_norm": 0.32585757970809937,
"learning_rate": 0.00015954953782059629,
"loss": 0.1358,
"step": 6090
},
{
"epoch": 3.5735207967193907,
"grad_norm": 1.226641297340393,
"learning_rate": 0.0001588985809139435,
"loss": 0.1098,
"step": 6100
},
{
"epoch": 3.579379027533685,
"grad_norm": 2.271745204925537,
"learning_rate": 0.00015824762400729072,
"loss": 0.1069,
"step": 6110
},
{
"epoch": 3.585237258347979,
"grad_norm": 0.6549329161643982,
"learning_rate": 0.00015759666710063794,
"loss": 0.0986,
"step": 6120
},
{
"epoch": 3.591095489162273,
"grad_norm": 1.0768980979919434,
"learning_rate": 0.00015694571019398516,
"loss": 0.0883,
"step": 6130
},
{
"epoch": 3.5969537199765673,
"grad_norm": 0.5941785573959351,
"learning_rate": 0.0001562947532873324,
"loss": 0.1091,
"step": 6140
},
{
"epoch": 3.602811950790861,
"grad_norm": 1.535934567451477,
"learning_rate": 0.0001556437963806796,
"loss": 0.0819,
"step": 6150
},
{
"epoch": 3.6086701816051554,
"grad_norm": 0.905866801738739,
"learning_rate": 0.00015499283947402681,
"loss": 0.1222,
"step": 6160
},
{
"epoch": 3.614528412419449,
"grad_norm": 2.089005470275879,
"learning_rate": 0.00015434188256737406,
"loss": 0.1122,
"step": 6170
},
{
"epoch": 3.6203866432337435,
"grad_norm": 1.4815788269042969,
"learning_rate": 0.00015369092566072125,
"loss": 0.1478,
"step": 6180
},
{
"epoch": 3.6262448740480373,
"grad_norm": 1.1967289447784424,
"learning_rate": 0.00015303996875406847,
"loss": 0.0809,
"step": 6190
},
{
"epoch": 3.6321031048623316,
"grad_norm": 0.13463656604290009,
"learning_rate": 0.00015238901184741572,
"loss": 0.0899,
"step": 6200
},
{
"epoch": 3.637961335676626,
"grad_norm": 0.9791207909584045,
"learning_rate": 0.00015173805494076293,
"loss": 0.1219,
"step": 6210
},
{
"epoch": 3.6438195664909196,
"grad_norm": 1.346298336982727,
"learning_rate": 0.00015108709803411013,
"loss": 0.1201,
"step": 6220
},
{
"epoch": 3.649677797305214,
"grad_norm": 0.5890740752220154,
"learning_rate": 0.00015043614112745737,
"loss": 0.0562,
"step": 6230
},
{
"epoch": 3.655536028119508,
"grad_norm": 1.8662189245224,
"learning_rate": 0.0001497851842208046,
"loss": 0.1372,
"step": 6240
},
{
"epoch": 3.661394258933802,
"grad_norm": 0.24533328413963318,
"learning_rate": 0.0001491342273141518,
"loss": 0.1056,
"step": 6250
},
{
"epoch": 3.667252489748096,
"grad_norm": 1.1817823648452759,
"learning_rate": 0.00014848327040749903,
"loss": 0.079,
"step": 6260
},
{
"epoch": 3.67311072056239,
"grad_norm": 1.736426591873169,
"learning_rate": 0.00014783231350084624,
"loss": 0.1301,
"step": 6270
},
{
"epoch": 3.6789689513766843,
"grad_norm": 1.0973541736602783,
"learning_rate": 0.00014718135659419346,
"loss": 0.0838,
"step": 6280
},
{
"epoch": 3.684827182190978,
"grad_norm": 0.12063330411911011,
"learning_rate": 0.0001465303996875407,
"loss": 0.1164,
"step": 6290
},
{
"epoch": 3.6906854130052724,
"grad_norm": 0.8762880563735962,
"learning_rate": 0.0001458794427808879,
"loss": 0.1077,
"step": 6300
},
{
"epoch": 3.6965436438195667,
"grad_norm": 1.3317028284072876,
"learning_rate": 0.00014522848587423512,
"loss": 0.1124,
"step": 6310
},
{
"epoch": 3.7024018746338605,
"grad_norm": 1.8302267789840698,
"learning_rate": 0.00014457752896758236,
"loss": 0.1027,
"step": 6320
},
{
"epoch": 3.7082601054481548,
"grad_norm": 0.6655034422874451,
"learning_rate": 0.00014392657206092958,
"loss": 0.1061,
"step": 6330
},
{
"epoch": 3.7141183362624486,
"grad_norm": 1.7261115312576294,
"learning_rate": 0.00014327561515427677,
"loss": 0.0996,
"step": 6340
},
{
"epoch": 3.719976567076743,
"grad_norm": 1.1165367364883423,
"learning_rate": 0.00014262465824762402,
"loss": 0.0936,
"step": 6350
},
{
"epoch": 3.7258347978910367,
"grad_norm": 2.128887414932251,
"learning_rate": 0.00014197370134097124,
"loss": 0.1273,
"step": 6360
},
{
"epoch": 3.731693028705331,
"grad_norm": 1.7992889881134033,
"learning_rate": 0.00014132274443431846,
"loss": 0.1361,
"step": 6370
},
{
"epoch": 3.737551259519625,
"grad_norm": 1.342489242553711,
"learning_rate": 0.00014067178752766567,
"loss": 0.1468,
"step": 6380
},
{
"epoch": 3.743409490333919,
"grad_norm": 0.9803612232208252,
"learning_rate": 0.0001400208306210129,
"loss": 0.0943,
"step": 6390
},
{
"epoch": 3.7492677211482133,
"grad_norm": 0.8913155198097229,
"learning_rate": 0.0001393698737143601,
"loss": 0.1049,
"step": 6400
},
{
"epoch": 3.7551259519625075,
"grad_norm": 1.2912554740905762,
"learning_rate": 0.00013871891680770736,
"loss": 0.0962,
"step": 6410
},
{
"epoch": 3.7609841827768014,
"grad_norm": 0.7567240595817566,
"learning_rate": 0.00013806795990105455,
"loss": 0.102,
"step": 6420
},
{
"epoch": 3.7668424135910956,
"grad_norm": 1.3401840925216675,
"learning_rate": 0.00013741700299440177,
"loss": 0.1328,
"step": 6430
},
{
"epoch": 3.7727006444053894,
"grad_norm": 0.6872816681861877,
"learning_rate": 0.000136766046087749,
"loss": 0.1094,
"step": 6440
},
{
"epoch": 3.7785588752196837,
"grad_norm": 0.5628686547279358,
"learning_rate": 0.00013611508918109623,
"loss": 0.078,
"step": 6450
},
{
"epoch": 3.7844171060339775,
"grad_norm": 0.6823394298553467,
"learning_rate": 0.00013546413227444342,
"loss": 0.0825,
"step": 6460
},
{
"epoch": 3.790275336848272,
"grad_norm": 1.2937862873077393,
"learning_rate": 0.00013481317536779064,
"loss": 0.1136,
"step": 6470
},
{
"epoch": 3.796133567662566,
"grad_norm": 1.6106140613555908,
"learning_rate": 0.00013416221846113789,
"loss": 0.0787,
"step": 6480
},
{
"epoch": 3.80199179847686,
"grad_norm": 0.094044528901577,
"learning_rate": 0.0001335112615544851,
"loss": 0.0737,
"step": 6490
},
{
"epoch": 3.807850029291154,
"grad_norm": 1.3219674825668335,
"learning_rate": 0.0001328603046478323,
"loss": 0.1171,
"step": 6500
},
{
"epoch": 3.8137082601054484,
"grad_norm": 0.3359241783618927,
"learning_rate": 0.00013220934774117954,
"loss": 0.0874,
"step": 6510
},
{
"epoch": 3.819566490919742,
"grad_norm": 1.6946830749511719,
"learning_rate": 0.00013155839083452676,
"loss": 0.1243,
"step": 6520
},
{
"epoch": 3.8254247217340365,
"grad_norm": 0.7258983850479126,
"learning_rate": 0.00013090743392787395,
"loss": 0.106,
"step": 6530
},
{
"epoch": 3.8312829525483303,
"grad_norm": 1.6327470541000366,
"learning_rate": 0.0001302564770212212,
"loss": 0.0981,
"step": 6540
},
{
"epoch": 3.8371411833626246,
"grad_norm": 0.45188337564468384,
"learning_rate": 0.00012960552011456842,
"loss": 0.049,
"step": 6550
},
{
"epoch": 3.8429994141769184,
"grad_norm": 1.3675637245178223,
"learning_rate": 0.00012895456320791563,
"loss": 0.0854,
"step": 6560
},
{
"epoch": 3.8488576449912126,
"grad_norm": 0.9713359475135803,
"learning_rate": 0.00012830360630126285,
"loss": 0.0827,
"step": 6570
},
{
"epoch": 3.854715875805507,
"grad_norm": 0.08092973381280899,
"learning_rate": 0.00012765264939461007,
"loss": 0.1022,
"step": 6580
},
{
"epoch": 3.8605741066198007,
"grad_norm": 1.1305333375930786,
"learning_rate": 0.0001270016924879573,
"loss": 0.0997,
"step": 6590
},
{
"epoch": 3.866432337434095,
"grad_norm": 2.724644184112549,
"learning_rate": 0.00012635073558130453,
"loss": 0.0637,
"step": 6600
},
{
"epoch": 3.8722905682483892,
"grad_norm": 1.3331657648086548,
"learning_rate": 0.00012569977867465173,
"loss": 0.1093,
"step": 6610
},
{
"epoch": 3.878148799062683,
"grad_norm": 1.6242687702178955,
"learning_rate": 0.00012504882176799894,
"loss": 0.0791,
"step": 6620
},
{
"epoch": 3.884007029876977,
"grad_norm": 0.24345877766609192,
"learning_rate": 0.0001243978648613462,
"loss": 0.0932,
"step": 6630
},
{
"epoch": 3.889865260691271,
"grad_norm": 1.968440294265747,
"learning_rate": 0.0001237469079546934,
"loss": 0.1046,
"step": 6640
},
{
"epoch": 3.8957234915055654,
"grad_norm": 1.3497897386550903,
"learning_rate": 0.00012309595104804063,
"loss": 0.1171,
"step": 6650
},
{
"epoch": 3.9015817223198592,
"grad_norm": 0.794064462184906,
"learning_rate": 0.00012244499414138785,
"loss": 0.0912,
"step": 6660
},
{
"epoch": 3.9074399531341535,
"grad_norm": 1.1040220260620117,
"learning_rate": 0.00012179403723473506,
"loss": 0.0932,
"step": 6670
},
{
"epoch": 3.9132981839484478,
"grad_norm": 0.6346971988677979,
"learning_rate": 0.00012114308032808228,
"loss": 0.0786,
"step": 6680
},
{
"epoch": 3.9191564147627416,
"grad_norm": 1.220487117767334,
"learning_rate": 0.0001204921234214295,
"loss": 0.1191,
"step": 6690
},
{
"epoch": 3.925014645577036,
"grad_norm": 1.4723790884017944,
"learning_rate": 0.00011984116651477672,
"loss": 0.0768,
"step": 6700
},
{
"epoch": 3.93087287639133,
"grad_norm": 0.9130443334579468,
"learning_rate": 0.00011919020960812395,
"loss": 0.0828,
"step": 6710
},
{
"epoch": 3.936731107205624,
"grad_norm": 1.0067200660705566,
"learning_rate": 0.00011853925270147116,
"loss": 0.0703,
"step": 6720
},
{
"epoch": 3.9425893380199177,
"grad_norm": 0.931067168712616,
"learning_rate": 0.00011788829579481839,
"loss": 0.145,
"step": 6730
},
{
"epoch": 3.948447568834212,
"grad_norm": 1.0656850337982178,
"learning_rate": 0.0001172373388881656,
"loss": 0.0963,
"step": 6740
},
{
"epoch": 3.9543057996485063,
"grad_norm": 1.115067958831787,
"learning_rate": 0.00011658638198151282,
"loss": 0.1641,
"step": 6750
},
{
"epoch": 3.9601640304628,
"grad_norm": 0.4096965491771698,
"learning_rate": 0.00011593542507486004,
"loss": 0.0915,
"step": 6760
},
{
"epoch": 3.9660222612770943,
"grad_norm": 0.781139612197876,
"learning_rate": 0.00011528446816820728,
"loss": 0.1007,
"step": 6770
},
{
"epoch": 3.9718804920913886,
"grad_norm": 0.6638227701187134,
"learning_rate": 0.00011463351126155448,
"loss": 0.0853,
"step": 6780
},
{
"epoch": 3.9777387229056824,
"grad_norm": 2.11378812789917,
"learning_rate": 0.00011398255435490171,
"loss": 0.0866,
"step": 6790
},
{
"epoch": 3.9835969537199767,
"grad_norm": 0.9695467948913574,
"learning_rate": 0.00011333159744824893,
"loss": 0.0919,
"step": 6800
},
{
"epoch": 3.9894551845342705,
"grad_norm": 1.504145622253418,
"learning_rate": 0.00011268064054159615,
"loss": 0.1168,
"step": 6810
},
{
"epoch": 3.9953134153485648,
"grad_norm": 2.174940586090088,
"learning_rate": 0.00011202968363494337,
"loss": 0.17,
"step": 6820
},
{
"epoch": 4.0,
"eval_f1": 0.9495159059474412,
"eval_loss": 0.14605550467967987,
"eval_runtime": 15.2718,
"eval_samples_per_second": 196.44,
"eval_steps_per_second": 6.155,
"step": 6828
},
{
"epoch": 4.001171646162859,
"grad_norm": 0.45618754625320435,
"learning_rate": 0.0001113787267282906,
"loss": 0.0885,
"step": 6830
},
{
"epoch": 4.007029876977153,
"grad_norm": 0.5365521907806396,
"learning_rate": 0.0001107277698216378,
"loss": 0.1259,
"step": 6840
},
{
"epoch": 4.012888107791447,
"grad_norm": 0.4301340878009796,
"learning_rate": 0.00011007681291498504,
"loss": 0.0706,
"step": 6850
},
{
"epoch": 4.018746338605741,
"grad_norm": 0.29090505838394165,
"learning_rate": 0.00010942585600833225,
"loss": 0.0633,
"step": 6860
},
{
"epoch": 4.024604569420035,
"grad_norm": 0.9259321093559265,
"learning_rate": 0.00010877489910167947,
"loss": 0.0879,
"step": 6870
},
{
"epoch": 4.0304628002343295,
"grad_norm": 0.9292414784431458,
"learning_rate": 0.00010812394219502669,
"loss": 0.0803,
"step": 6880
},
{
"epoch": 4.036321031048623,
"grad_norm": 0.9546783566474915,
"learning_rate": 0.00010747298528837392,
"loss": 0.0461,
"step": 6890
},
{
"epoch": 4.042179261862917,
"grad_norm": 1.73061203956604,
"learning_rate": 0.00010682202838172113,
"loss": 0.0864,
"step": 6900
},
{
"epoch": 4.048037492677212,
"grad_norm": 1.5470154285430908,
"learning_rate": 0.00010617107147506836,
"loss": 0.0595,
"step": 6910
},
{
"epoch": 4.053895723491506,
"grad_norm": 0.4693801999092102,
"learning_rate": 0.00010552011456841558,
"loss": 0.122,
"step": 6920
},
{
"epoch": 4.059753954305799,
"grad_norm": 0.8462125658988953,
"learning_rate": 0.00010486915766176278,
"loss": 0.1058,
"step": 6930
},
{
"epoch": 4.065612185120094,
"grad_norm": 0.8536199927330017,
"learning_rate": 0.00010421820075511002,
"loss": 0.0799,
"step": 6940
},
{
"epoch": 4.071470415934388,
"grad_norm": 0.09983082860708237,
"learning_rate": 0.00010356724384845723,
"loss": 0.0425,
"step": 6950
},
{
"epoch": 4.077328646748682,
"grad_norm": 2.1790366172790527,
"learning_rate": 0.00010291628694180445,
"loss": 0.0846,
"step": 6960
},
{
"epoch": 4.083186877562976,
"grad_norm": 1.6550222635269165,
"learning_rate": 0.00010226533003515167,
"loss": 0.0812,
"step": 6970
},
{
"epoch": 4.08904510837727,
"grad_norm": 1.3716167211532593,
"learning_rate": 0.0001016143731284989,
"loss": 0.0949,
"step": 6980
},
{
"epoch": 4.094903339191564,
"grad_norm": 0.5519310832023621,
"learning_rate": 0.00010096341622184611,
"loss": 0.0448,
"step": 6990
},
{
"epoch": 4.100761570005858,
"grad_norm": 0.1261308640241623,
"learning_rate": 0.00010031245931519334,
"loss": 0.0681,
"step": 7000
},
{
"epoch": 4.106619800820153,
"grad_norm": 3.2982888221740723,
"learning_rate": 9.966150240854056e-05,
"loss": 0.0542,
"step": 7010
},
{
"epoch": 4.1124780316344465,
"grad_norm": 1.6753637790679932,
"learning_rate": 9.901054550188778e-05,
"loss": 0.0618,
"step": 7020
},
{
"epoch": 4.11833626244874,
"grad_norm": 1.3481342792510986,
"learning_rate": 9.8359588595235e-05,
"loss": 0.0991,
"step": 7030
},
{
"epoch": 4.124194493263035,
"grad_norm": 1.577588677406311,
"learning_rate": 9.770863168858221e-05,
"loss": 0.0559,
"step": 7040
},
{
"epoch": 4.130052724077329,
"grad_norm": 0.1360217034816742,
"learning_rate": 9.705767478192943e-05,
"loss": 0.0527,
"step": 7050
},
{
"epoch": 4.135910954891623,
"grad_norm": 0.4467345178127289,
"learning_rate": 9.640671787527666e-05,
"loss": 0.0903,
"step": 7060
},
{
"epoch": 4.1417691857059165,
"grad_norm": 0.2530747354030609,
"learning_rate": 9.575576096862387e-05,
"loss": 0.0697,
"step": 7070
},
{
"epoch": 4.147627416520211,
"grad_norm": 0.8840377926826477,
"learning_rate": 9.51048040619711e-05,
"loss": 0.0959,
"step": 7080
},
{
"epoch": 4.153485647334505,
"grad_norm": 1.2627636194229126,
"learning_rate": 9.445384715531832e-05,
"loss": 0.0849,
"step": 7090
},
{
"epoch": 4.159343878148799,
"grad_norm": 2.104389190673828,
"learning_rate": 9.380289024866554e-05,
"loss": 0.0869,
"step": 7100
},
{
"epoch": 4.1652021089630935,
"grad_norm": 0.8246046304702759,
"learning_rate": 9.315193334201276e-05,
"loss": 0.0991,
"step": 7110
},
{
"epoch": 4.171060339777387,
"grad_norm": 1.2681169509887695,
"learning_rate": 9.250097643535999e-05,
"loss": 0.127,
"step": 7120
},
{
"epoch": 4.176918570591681,
"grad_norm": 0.6696609854698181,
"learning_rate": 9.18500195287072e-05,
"loss": 0.0894,
"step": 7130
},
{
"epoch": 4.182776801405975,
"grad_norm": 1.066689133644104,
"learning_rate": 9.119906262205443e-05,
"loss": 0.0874,
"step": 7140
},
{
"epoch": 4.18863503222027,
"grad_norm": 0.2604907155036926,
"learning_rate": 9.054810571540164e-05,
"loss": 0.0812,
"step": 7150
},
{
"epoch": 4.1944932630345635,
"grad_norm": 0.5059824585914612,
"learning_rate": 8.989714880874886e-05,
"loss": 0.0683,
"step": 7160
},
{
"epoch": 4.200351493848857,
"grad_norm": 0.43476417660713196,
"learning_rate": 8.924619190209608e-05,
"loss": 0.0648,
"step": 7170
},
{
"epoch": 4.206209724663152,
"grad_norm": 0.4302019476890564,
"learning_rate": 8.859523499544331e-05,
"loss": 0.0873,
"step": 7180
},
{
"epoch": 4.212067955477446,
"grad_norm": 0.7343120574951172,
"learning_rate": 8.794427808879052e-05,
"loss": 0.0909,
"step": 7190
},
{
"epoch": 4.21792618629174,
"grad_norm": 1.6828593015670776,
"learning_rate": 8.729332118213775e-05,
"loss": 0.0659,
"step": 7200
},
{
"epoch": 4.223784417106034,
"grad_norm": 1.096401333808899,
"learning_rate": 8.664236427548497e-05,
"loss": 0.0789,
"step": 7210
},
{
"epoch": 4.229642647920328,
"grad_norm": 0.8949251770973206,
"learning_rate": 8.599140736883219e-05,
"loss": 0.0659,
"step": 7220
},
{
"epoch": 4.235500878734622,
"grad_norm": 2.401662588119507,
"learning_rate": 8.53404504621794e-05,
"loss": 0.0947,
"step": 7230
},
{
"epoch": 4.241359109548916,
"grad_norm": 0.17847134172916412,
"learning_rate": 8.468949355552664e-05,
"loss": 0.0576,
"step": 7240
},
{
"epoch": 4.2472173403632105,
"grad_norm": 1.1923648118972778,
"learning_rate": 8.403853664887384e-05,
"loss": 0.0913,
"step": 7250
},
{
"epoch": 4.253075571177504,
"grad_norm": 1.0581272840499878,
"learning_rate": 8.338757974222107e-05,
"loss": 0.0991,
"step": 7260
},
{
"epoch": 4.258933801991798,
"grad_norm": 0.5614556670188904,
"learning_rate": 8.273662283556829e-05,
"loss": 0.0954,
"step": 7270
},
{
"epoch": 4.264792032806093,
"grad_norm": 0.4855095148086548,
"learning_rate": 8.208566592891551e-05,
"loss": 0.1072,
"step": 7280
},
{
"epoch": 4.270650263620387,
"grad_norm": 0.4003661274909973,
"learning_rate": 8.143470902226273e-05,
"loss": 0.0616,
"step": 7290
},
{
"epoch": 4.2765084944346805,
"grad_norm": 1.0927107334136963,
"learning_rate": 8.078375211560996e-05,
"loss": 0.03,
"step": 7300
},
{
"epoch": 4.282366725248975,
"grad_norm": 1.1179447174072266,
"learning_rate": 8.013279520895717e-05,
"loss": 0.1378,
"step": 7310
},
{
"epoch": 4.288224956063269,
"grad_norm": 0.48047196865081787,
"learning_rate": 7.948183830230438e-05,
"loss": 0.074,
"step": 7320
},
{
"epoch": 4.294083186877563,
"grad_norm": 0.5955802202224731,
"learning_rate": 7.883088139565162e-05,
"loss": 0.0636,
"step": 7330
},
{
"epoch": 4.299941417691857,
"grad_norm": 0.6766892075538635,
"learning_rate": 7.817992448899882e-05,
"loss": 0.0752,
"step": 7340
},
{
"epoch": 4.305799648506151,
"grad_norm": 0.9996503591537476,
"learning_rate": 7.752896758234605e-05,
"loss": 0.047,
"step": 7350
},
{
"epoch": 4.311657879320445,
"grad_norm": 1.4891853332519531,
"learning_rate": 7.687801067569327e-05,
"loss": 0.0873,
"step": 7360
},
{
"epoch": 4.317516110134739,
"grad_norm": 0.449779212474823,
"learning_rate": 7.622705376904049e-05,
"loss": 0.0792,
"step": 7370
},
{
"epoch": 4.323374340949034,
"grad_norm": 1.9570748805999756,
"learning_rate": 7.557609686238771e-05,
"loss": 0.0836,
"step": 7380
},
{
"epoch": 4.3292325717633275,
"grad_norm": 0.9605783224105835,
"learning_rate": 7.492513995573493e-05,
"loss": 0.1052,
"step": 7390
},
{
"epoch": 4.335090802577621,
"grad_norm": 2.792238712310791,
"learning_rate": 7.427418304908215e-05,
"loss": 0.0913,
"step": 7400
},
{
"epoch": 4.340949033391915,
"grad_norm": 0.2096194326877594,
"learning_rate": 7.362322614242938e-05,
"loss": 0.063,
"step": 7410
},
{
"epoch": 4.34680726420621,
"grad_norm": 0.6416425108909607,
"learning_rate": 7.297226923577658e-05,
"loss": 0.0934,
"step": 7420
},
{
"epoch": 4.352665495020504,
"grad_norm": 0.5825894474983215,
"learning_rate": 7.232131232912381e-05,
"loss": 0.086,
"step": 7430
},
{
"epoch": 4.3585237258347975,
"grad_norm": 1.1459928750991821,
"learning_rate": 7.167035542247103e-05,
"loss": 0.122,
"step": 7440
},
{
"epoch": 4.364381956649092,
"grad_norm": 1.567478895187378,
"learning_rate": 7.101939851581825e-05,
"loss": 0.0627,
"step": 7450
},
{
"epoch": 4.370240187463386,
"grad_norm": 1.6177845001220703,
"learning_rate": 7.036844160916547e-05,
"loss": 0.0993,
"step": 7460
},
{
"epoch": 4.37609841827768,
"grad_norm": 2.1598875522613525,
"learning_rate": 6.97174847025127e-05,
"loss": 0.0849,
"step": 7470
},
{
"epoch": 4.381956649091975,
"grad_norm": 0.6902780532836914,
"learning_rate": 6.90665277958599e-05,
"loss": 0.111,
"step": 7480
},
{
"epoch": 4.387814879906268,
"grad_norm": 2.869980573654175,
"learning_rate": 6.841557088920714e-05,
"loss": 0.141,
"step": 7490
},
{
"epoch": 4.393673110720562,
"grad_norm": 0.31494468450546265,
"learning_rate": 6.776461398255436e-05,
"loss": 0.0818,
"step": 7500
},
{
"epoch": 4.399531341534857,
"grad_norm": 0.4729668200016022,
"learning_rate": 6.711365707590158e-05,
"loss": 0.0808,
"step": 7510
},
{
"epoch": 4.405389572349151,
"grad_norm": 1.1855378150939941,
"learning_rate": 6.64627001692488e-05,
"loss": 0.0634,
"step": 7520
},
{
"epoch": 4.411247803163445,
"grad_norm": 1.2448687553405762,
"learning_rate": 6.581174326259603e-05,
"loss": 0.0801,
"step": 7530
},
{
"epoch": 4.417106033977738,
"grad_norm": 2.6101768016815186,
"learning_rate": 6.516078635594323e-05,
"loss": 0.0942,
"step": 7540
},
{
"epoch": 4.422964264792033,
"grad_norm": 0.29874828457832336,
"learning_rate": 6.450982944929046e-05,
"loss": 0.0317,
"step": 7550
},
{
"epoch": 4.428822495606327,
"grad_norm": 0.7493123412132263,
"learning_rate": 6.385887254263768e-05,
"loss": 0.1051,
"step": 7560
},
{
"epoch": 4.434680726420621,
"grad_norm": 2.9272103309631348,
"learning_rate": 6.32079156359849e-05,
"loss": 0.0689,
"step": 7570
},
{
"epoch": 4.440538957234915,
"grad_norm": 0.1127224862575531,
"learning_rate": 6.255695872933212e-05,
"loss": 0.0628,
"step": 7580
},
{
"epoch": 4.446397188049209,
"grad_norm": 0.74460768699646,
"learning_rate": 6.190600182267934e-05,
"loss": 0.0694,
"step": 7590
},
{
"epoch": 4.452255418863503,
"grad_norm": 1.3040841817855835,
"learning_rate": 6.125504491602657e-05,
"loss": 0.0642,
"step": 7600
},
{
"epoch": 4.458113649677797,
"grad_norm": 0.5990577936172485,
"learning_rate": 6.060408800937378e-05,
"loss": 0.0823,
"step": 7610
},
{
"epoch": 4.463971880492092,
"grad_norm": 1.0939172506332397,
"learning_rate": 5.9953131102721e-05,
"loss": 0.0664,
"step": 7620
},
{
"epoch": 4.469830111306385,
"grad_norm": 0.4238429367542267,
"learning_rate": 5.9302174196068224e-05,
"loss": 0.0908,
"step": 7630
},
{
"epoch": 4.475688342120679,
"grad_norm": 0.3552704155445099,
"learning_rate": 5.865121728941544e-05,
"loss": 0.0996,
"step": 7640
},
{
"epoch": 4.481546572934974,
"grad_norm": 0.44584277272224426,
"learning_rate": 5.800026038276266e-05,
"loss": 0.0522,
"step": 7650
},
{
"epoch": 4.487404803749268,
"grad_norm": 1.605562448501587,
"learning_rate": 5.7349303476109886e-05,
"loss": 0.0935,
"step": 7660
},
{
"epoch": 4.493263034563562,
"grad_norm": 1.6356998682022095,
"learning_rate": 5.6698346569457104e-05,
"loss": 0.0821,
"step": 7670
},
{
"epoch": 4.499121265377856,
"grad_norm": 1.8082013130187988,
"learning_rate": 5.604738966280432e-05,
"loss": 0.0748,
"step": 7680
},
{
"epoch": 4.50497949619215,
"grad_norm": 2.335904359817505,
"learning_rate": 5.539643275615154e-05,
"loss": 0.1107,
"step": 7690
},
{
"epoch": 4.510837727006444,
"grad_norm": 2.8229901790618896,
"learning_rate": 5.4745475849498766e-05,
"loss": 0.0908,
"step": 7700
},
{
"epoch": 4.516695957820739,
"grad_norm": 0.3684004247188568,
"learning_rate": 5.4094518942845985e-05,
"loss": 0.0578,
"step": 7710
},
{
"epoch": 4.5225541886350324,
"grad_norm": 0.6283183693885803,
"learning_rate": 5.34435620361932e-05,
"loss": 0.0323,
"step": 7720
},
{
"epoch": 4.528412419449326,
"grad_norm": 2.3907487392425537,
"learning_rate": 5.279260512954043e-05,
"loss": 0.09,
"step": 7730
},
{
"epoch": 4.53427065026362,
"grad_norm": 1.1042436361312866,
"learning_rate": 5.214164822288765e-05,
"loss": 0.0775,
"step": 7740
},
{
"epoch": 4.540128881077915,
"grad_norm": 1.1733710765838623,
"learning_rate": 5.1490691316234865e-05,
"loss": 0.0773,
"step": 7750
},
{
"epoch": 4.545987111892209,
"grad_norm": 1.506988525390625,
"learning_rate": 5.083973440958209e-05,
"loss": 0.0922,
"step": 7760
},
{
"epoch": 4.551845342706502,
"grad_norm": 0.33986783027648926,
"learning_rate": 5.018877750292931e-05,
"loss": 0.1085,
"step": 7770
},
{
"epoch": 4.557703573520797,
"grad_norm": 2.0390729904174805,
"learning_rate": 4.953782059627653e-05,
"loss": 0.0657,
"step": 7780
},
{
"epoch": 4.563561804335091,
"grad_norm": 0.731065571308136,
"learning_rate": 4.888686368962375e-05,
"loss": 0.0725,
"step": 7790
},
{
"epoch": 4.569420035149385,
"grad_norm": 1.5336552858352661,
"learning_rate": 4.823590678297097e-05,
"loss": 0.1152,
"step": 7800
},
{
"epoch": 4.575278265963679,
"grad_norm": 1.6048200130462646,
"learning_rate": 4.758494987631819e-05,
"loss": 0.0727,
"step": 7810
},
{
"epoch": 4.581136496777973,
"grad_norm": 1.8822131156921387,
"learning_rate": 4.6933992969665415e-05,
"loss": 0.0926,
"step": 7820
},
{
"epoch": 4.586994727592267,
"grad_norm": 1.853079915046692,
"learning_rate": 4.628303606301263e-05,
"loss": 0.0745,
"step": 7830
},
{
"epoch": 4.592852958406561,
"grad_norm": 0.5452135801315308,
"learning_rate": 4.563207915635985e-05,
"loss": 0.0898,
"step": 7840
},
{
"epoch": 4.598711189220856,
"grad_norm": 0.16546402871608734,
"learning_rate": 4.498112224970708e-05,
"loss": 0.0756,
"step": 7850
},
{
"epoch": 4.6045694200351495,
"grad_norm": 1.2575308084487915,
"learning_rate": 4.4330165343054295e-05,
"loss": 0.0458,
"step": 7860
},
{
"epoch": 4.610427650849443,
"grad_norm": 2.0601093769073486,
"learning_rate": 4.367920843640151e-05,
"loss": 0.047,
"step": 7870
},
{
"epoch": 4.616285881663737,
"grad_norm": 1.2107033729553223,
"learning_rate": 4.3028251529748725e-05,
"loss": 0.0702,
"step": 7880
},
{
"epoch": 4.622144112478032,
"grad_norm": 1.5647666454315186,
"learning_rate": 4.237729462309595e-05,
"loss": 0.0559,
"step": 7890
},
{
"epoch": 4.628002343292326,
"grad_norm": 0.9503174424171448,
"learning_rate": 4.172633771644317e-05,
"loss": 0.0579,
"step": 7900
},
{
"epoch": 4.6338605741066194,
"grad_norm": 1.5842559337615967,
"learning_rate": 4.107538080979039e-05,
"loss": 0.0941,
"step": 7910
},
{
"epoch": 4.639718804920914,
"grad_norm": 0.5423364639282227,
"learning_rate": 4.042442390313761e-05,
"loss": 0.0358,
"step": 7920
},
{
"epoch": 4.645577035735208,
"grad_norm": 0.35448887944221497,
"learning_rate": 3.977346699648483e-05,
"loss": 0.0342,
"step": 7930
},
{
"epoch": 4.651435266549502,
"grad_norm": 0.5956054925918579,
"learning_rate": 3.912251008983205e-05,
"loss": 0.0918,
"step": 7940
},
{
"epoch": 4.6572934973637965,
"grad_norm": 0.8710196018218994,
"learning_rate": 3.8471553183179275e-05,
"loss": 0.0689,
"step": 7950
},
{
"epoch": 4.66315172817809,
"grad_norm": 1.2711126804351807,
"learning_rate": 3.782059627652649e-05,
"loss": 0.0706,
"step": 7960
},
{
"epoch": 4.669009958992384,
"grad_norm": 1.3305224180221558,
"learning_rate": 3.716963936987371e-05,
"loss": 0.0902,
"step": 7970
},
{
"epoch": 4.674868189806679,
"grad_norm": 2.224329948425293,
"learning_rate": 3.651868246322094e-05,
"loss": 0.084,
"step": 7980
},
{
"epoch": 4.680726420620973,
"grad_norm": 2.547774076461792,
"learning_rate": 3.5867725556568155e-05,
"loss": 0.0492,
"step": 7990
},
{
"epoch": 4.6865846514352665,
"grad_norm": 0.06184905394911766,
"learning_rate": 3.5216768649915374e-05,
"loss": 0.0577,
"step": 8000
},
{
"epoch": 4.69244288224956,
"grad_norm": 1.6255323886871338,
"learning_rate": 3.45658117432626e-05,
"loss": 0.0673,
"step": 8010
},
{
"epoch": 4.698301113063855,
"grad_norm": 1.4682468175888062,
"learning_rate": 3.391485483660982e-05,
"loss": 0.0767,
"step": 8020
},
{
"epoch": 4.704159343878149,
"grad_norm": 2.154526472091675,
"learning_rate": 3.3263897929957036e-05,
"loss": 0.0806,
"step": 8030
},
{
"epoch": 4.710017574692443,
"grad_norm": 1.4415132999420166,
"learning_rate": 3.2612941023304254e-05,
"loss": 0.0657,
"step": 8040
},
{
"epoch": 4.715875805506737,
"grad_norm": 2.2767250537872314,
"learning_rate": 3.196198411665148e-05,
"loss": 0.0645,
"step": 8050
},
{
"epoch": 4.721734036321031,
"grad_norm": 1.7800607681274414,
"learning_rate": 3.13110272099987e-05,
"loss": 0.0997,
"step": 8060
},
{
"epoch": 4.727592267135325,
"grad_norm": 0.4202064871788025,
"learning_rate": 3.0660070303345917e-05,
"loss": 0.0715,
"step": 8070
},
{
"epoch": 4.733450497949619,
"grad_norm": 1.201169490814209,
"learning_rate": 3.0009113396693142e-05,
"loss": 0.0883,
"step": 8080
},
{
"epoch": 4.7393087287639135,
"grad_norm": 1.4814995527267456,
"learning_rate": 2.935815649004036e-05,
"loss": 0.0464,
"step": 8090
},
{
"epoch": 4.745166959578207,
"grad_norm": 1.5350182056427002,
"learning_rate": 2.8707199583387582e-05,
"loss": 0.0605,
"step": 8100
},
{
"epoch": 4.751025190392501,
"grad_norm": 0.17659243941307068,
"learning_rate": 2.80562426767348e-05,
"loss": 0.0636,
"step": 8110
},
{
"epoch": 4.756883421206796,
"grad_norm": 0.8797257542610168,
"learning_rate": 2.7405285770082022e-05,
"loss": 0.043,
"step": 8120
},
{
"epoch": 4.76274165202109,
"grad_norm": 1.0583323240280151,
"learning_rate": 2.6754328863429244e-05,
"loss": 0.0305,
"step": 8130
},
{
"epoch": 4.7685998828353835,
"grad_norm": 1.5315287113189697,
"learning_rate": 2.6103371956776463e-05,
"loss": 0.0446,
"step": 8140
},
{
"epoch": 4.774458113649677,
"grad_norm": 0.2287391722202301,
"learning_rate": 2.545241505012368e-05,
"loss": 0.0947,
"step": 8150
},
{
"epoch": 4.780316344463972,
"grad_norm": 0.09398193657398224,
"learning_rate": 2.4801458143470903e-05,
"loss": 0.0783,
"step": 8160
},
{
"epoch": 4.786174575278266,
"grad_norm": 1.1325687170028687,
"learning_rate": 2.415050123681812e-05,
"loss": 0.081,
"step": 8170
},
{
"epoch": 4.79203280609256,
"grad_norm": 0.5710904598236084,
"learning_rate": 2.3499544330165343e-05,
"loss": 0.0852,
"step": 8180
},
{
"epoch": 4.797891036906854,
"grad_norm": 0.3212810754776001,
"learning_rate": 2.284858742351256e-05,
"loss": 0.0315,
"step": 8190
},
{
"epoch": 4.803749267721148,
"grad_norm": 0.5738294124603271,
"learning_rate": 2.2197630516859783e-05,
"loss": 0.062,
"step": 8200
},
{
"epoch": 4.809607498535442,
"grad_norm": 3.216890811920166,
"learning_rate": 2.1546673610207005e-05,
"loss": 0.0752,
"step": 8210
},
{
"epoch": 4.815465729349737,
"grad_norm": 0.953929603099823,
"learning_rate": 2.0895716703554224e-05,
"loss": 0.0529,
"step": 8220
},
{
"epoch": 4.8213239601640305,
"grad_norm": 0.4356798827648163,
"learning_rate": 2.0244759796901445e-05,
"loss": 0.1011,
"step": 8230
},
{
"epoch": 4.827182190978324,
"grad_norm": 1.009763479232788,
"learning_rate": 1.9593802890248667e-05,
"loss": 0.0795,
"step": 8240
},
{
"epoch": 4.833040421792619,
"grad_norm": 1.7814884185791016,
"learning_rate": 1.8942845983595886e-05,
"loss": 0.0712,
"step": 8250
},
{
"epoch": 4.838898652606913,
"grad_norm": 0.352679580450058,
"learning_rate": 1.8291889076943108e-05,
"loss": 0.0627,
"step": 8260
},
{
"epoch": 4.844756883421207,
"grad_norm": 2.5139098167419434,
"learning_rate": 1.764093217029033e-05,
"loss": 0.0543,
"step": 8270
},
{
"epoch": 4.8506151142355005,
"grad_norm": 1.804341435432434,
"learning_rate": 1.6989975263637548e-05,
"loss": 0.0854,
"step": 8280
},
{
"epoch": 4.856473345049795,
"grad_norm": 0.4135284721851349,
"learning_rate": 1.633901835698477e-05,
"loss": 0.0646,
"step": 8290
},
{
"epoch": 4.862331575864089,
"grad_norm": 2.144594192504883,
"learning_rate": 1.5688061450331988e-05,
"loss": 0.0888,
"step": 8300
},
{
"epoch": 4.868189806678383,
"grad_norm": 0.48692771792411804,
"learning_rate": 1.5037104543679208e-05,
"loss": 0.0783,
"step": 8310
},
{
"epoch": 4.874048037492678,
"grad_norm": 0.4761165976524353,
"learning_rate": 1.4386147637026428e-05,
"loss": 0.0787,
"step": 8320
},
{
"epoch": 4.879906268306971,
"grad_norm": 2.453779697418213,
"learning_rate": 1.373519073037365e-05,
"loss": 0.085,
"step": 8330
},
{
"epoch": 4.885764499121265,
"grad_norm": 0.2788262665271759,
"learning_rate": 1.308423382372087e-05,
"loss": 0.0797,
"step": 8340
},
{
"epoch": 4.891622729935559,
"grad_norm": 1.32106614112854,
"learning_rate": 1.243327691706809e-05,
"loss": 0.0453,
"step": 8350
},
{
"epoch": 4.897480960749854,
"grad_norm": 0.27709731459617615,
"learning_rate": 1.178232001041531e-05,
"loss": 0.0665,
"step": 8360
},
{
"epoch": 4.9033391915641475,
"grad_norm": 0.5980609655380249,
"learning_rate": 1.1131363103762532e-05,
"loss": 0.0666,
"step": 8370
},
{
"epoch": 4.909197422378441,
"grad_norm": 0.24671287834644318,
"learning_rate": 1.0480406197109753e-05,
"loss": 0.052,
"step": 8380
},
{
"epoch": 4.915055653192736,
"grad_norm": 0.6081691980361938,
"learning_rate": 9.829449290456971e-06,
"loss": 0.0812,
"step": 8390
},
{
"epoch": 4.92091388400703,
"grad_norm": 0.37573495507240295,
"learning_rate": 9.178492383804191e-06,
"loss": 0.0421,
"step": 8400
},
{
"epoch": 4.926772114821324,
"grad_norm": 1.504637598991394,
"learning_rate": 8.527535477151413e-06,
"loss": 0.0747,
"step": 8410
},
{
"epoch": 4.932630345635618,
"grad_norm": 0.8133247494697571,
"learning_rate": 7.876578570498633e-06,
"loss": 0.1044,
"step": 8420
},
{
"epoch": 4.938488576449912,
"grad_norm": 1.2968809604644775,
"learning_rate": 7.225621663845853e-06,
"loss": 0.0999,
"step": 8430
},
{
"epoch": 4.944346807264206,
"grad_norm": 1.6766571998596191,
"learning_rate": 6.574664757193074e-06,
"loss": 0.0636,
"step": 8440
},
{
"epoch": 4.950205038078501,
"grad_norm": 1.0582013130187988,
"learning_rate": 5.923707850540294e-06,
"loss": 0.0686,
"step": 8450
},
{
"epoch": 4.956063268892795,
"grad_norm": 1.32669198513031,
"learning_rate": 5.2727509438875145e-06,
"loss": 0.0632,
"step": 8460
},
{
"epoch": 4.961921499707088,
"grad_norm": 0.4580432176589966,
"learning_rate": 4.621794037234735e-06,
"loss": 0.0913,
"step": 8470
},
{
"epoch": 4.967779730521382,
"grad_norm": 0.21711011230945587,
"learning_rate": 3.970837130581956e-06,
"loss": 0.0618,
"step": 8480
},
{
"epoch": 4.973637961335677,
"grad_norm": 1.316470742225647,
"learning_rate": 3.3198802239291758e-06,
"loss": 0.0528,
"step": 8490
},
{
"epoch": 4.979496192149971,
"grad_norm": 0.8495000004768372,
"learning_rate": 2.6689233172763963e-06,
"loss": 0.0685,
"step": 8500
},
{
"epoch": 4.985354422964265,
"grad_norm": 0.20742426812648773,
"learning_rate": 2.017966410623617e-06,
"loss": 0.0892,
"step": 8510
},
{
"epoch": 4.991212653778559,
"grad_norm": 0.988993227481842,
"learning_rate": 1.367009503970837e-06,
"loss": 0.0613,
"step": 8520
},
{
"epoch": 4.997070884592853,
"grad_norm": 1.7386090755462646,
"learning_rate": 7.160525973180576e-07,
"loss": 0.0695,
"step": 8530
},
{
"epoch": 5.0,
"eval_f1": 0.9536241841291653,
"eval_loss": 0.12523502111434937,
"eval_runtime": 14.5928,
"eval_samples_per_second": 205.581,
"eval_steps_per_second": 6.442,
"step": 8535
},
{
"epoch": 5.0,
"step": 8535,
"total_flos": 1.4339856401173094e+19,
"train_loss": 0.13944385742400917,
"train_runtime": 1629.9678,
"train_samples_per_second": 167.519,
"train_steps_per_second": 5.236
}
],
"logging_steps": 10,
"max_steps": 8535,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.4339856401173094e+19,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}