{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.940092165898617,
"eval_steps": 500,
"global_step": 360,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.013824884792626729,
"grad_norm": 34.963134765625,
"learning_rate": 5.0000000000000004e-08,
"loss": 2.5476,
"step": 1
},
{
"epoch": 0.027649769585253458,
"grad_norm": 35.32600021362305,
"learning_rate": 1.0000000000000001e-07,
"loss": 2.6058,
"step": 2
},
{
"epoch": 0.041474654377880185,
"grad_norm": 34.955448150634766,
"learning_rate": 1.5000000000000002e-07,
"loss": 2.5871,
"step": 3
},
{
"epoch": 0.055299539170506916,
"grad_norm": 35.09806442260742,
"learning_rate": 2.0000000000000002e-07,
"loss": 2.5912,
"step": 4
},
{
"epoch": 0.06912442396313365,
"grad_norm": 34.88739776611328,
"learning_rate": 2.5000000000000004e-07,
"loss": 2.592,
"step": 5
},
{
"epoch": 0.08294930875576037,
"grad_norm": 34.84288024902344,
"learning_rate": 3.0000000000000004e-07,
"loss": 2.5609,
"step": 6
},
{
"epoch": 0.0967741935483871,
"grad_norm": 35.0090217590332,
"learning_rate": 3.5000000000000004e-07,
"loss": 2.5651,
"step": 7
},
{
"epoch": 0.11059907834101383,
"grad_norm": 35.03983688354492,
"learning_rate": 4.0000000000000003e-07,
"loss": 2.5437,
"step": 8
},
{
"epoch": 0.12442396313364056,
"grad_norm": 34.802833557128906,
"learning_rate": 4.5000000000000003e-07,
"loss": 2.5448,
"step": 9
},
{
"epoch": 0.1382488479262673,
"grad_norm": 34.5220947265625,
"learning_rate": 5.000000000000001e-07,
"loss": 2.504,
"step": 10
},
{
"epoch": 0.15207373271889402,
"grad_norm": 34.401580810546875,
"learning_rate": 5.5e-07,
"loss": 2.4814,
"step": 11
},
{
"epoch": 0.16589861751152074,
"grad_norm": 33.76997375488281,
"learning_rate": 6.000000000000001e-07,
"loss": 2.4282,
"step": 12
},
{
"epoch": 0.17972350230414746,
"grad_norm": 33.53415298461914,
"learning_rate": 6.5e-07,
"loss": 2.4216,
"step": 13
},
{
"epoch": 0.1935483870967742,
"grad_norm": 32.401580810546875,
"learning_rate": 7.000000000000001e-07,
"loss": 2.3362,
"step": 14
},
{
"epoch": 0.2073732718894009,
"grad_norm": 33.636661529541016,
"learning_rate": 7.5e-07,
"loss": 2.2978,
"step": 15
},
{
"epoch": 0.22119815668202766,
"grad_norm": 31.3782901763916,
"learning_rate": 8.000000000000001e-07,
"loss": 2.1358,
"step": 16
},
{
"epoch": 0.2350230414746544,
"grad_norm": 30.72391700744629,
"learning_rate": 8.500000000000001e-07,
"loss": 2.0652,
"step": 17
},
{
"epoch": 0.2488479262672811,
"grad_norm": 30.817584991455078,
"learning_rate": 9.000000000000001e-07,
"loss": 2.0115,
"step": 18
},
{
"epoch": 0.2626728110599078,
"grad_norm": 29.683996200561523,
"learning_rate": 9.500000000000001e-07,
"loss": 1.8668,
"step": 19
},
{
"epoch": 0.2764976958525346,
"grad_norm": 29.506683349609375,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.7796,
"step": 20
},
{
"epoch": 0.2903225806451613,
"grad_norm": 27.55340003967285,
"learning_rate": 1.0500000000000001e-06,
"loss": 1.5656,
"step": 21
},
{
"epoch": 0.30414746543778803,
"grad_norm": 27.78036880493164,
"learning_rate": 1.1e-06,
"loss": 1.5112,
"step": 22
},
{
"epoch": 0.31797235023041476,
"grad_norm": 26.36115264892578,
"learning_rate": 1.1500000000000002e-06,
"loss": 1.3283,
"step": 23
},
{
"epoch": 0.3317972350230415,
"grad_norm": 25.388761520385742,
"learning_rate": 1.2000000000000002e-06,
"loss": 1.137,
"step": 24
},
{
"epoch": 0.3456221198156682,
"grad_norm": 25.21432876586914,
"learning_rate": 1.25e-06,
"loss": 0.9867,
"step": 25
},
{
"epoch": 0.35944700460829493,
"grad_norm": 24.924489974975586,
"learning_rate": 1.3e-06,
"loss": 0.7122,
"step": 26
},
{
"epoch": 0.37327188940092165,
"grad_norm": 21.881420135498047,
"learning_rate": 1.3500000000000002e-06,
"loss": 0.4952,
"step": 27
},
{
"epoch": 0.3870967741935484,
"grad_norm": 17.67154884338379,
"learning_rate": 1.4000000000000001e-06,
"loss": 0.3602,
"step": 28
},
{
"epoch": 0.4009216589861751,
"grad_norm": 11.489490509033203,
"learning_rate": 1.45e-06,
"loss": 0.2432,
"step": 29
},
{
"epoch": 0.4147465437788018,
"grad_norm": 7.622438907623291,
"learning_rate": 1.5e-06,
"loss": 0.189,
"step": 30
},
{
"epoch": 0.42857142857142855,
"grad_norm": 4.340638637542725,
"learning_rate": 1.5500000000000002e-06,
"loss": 0.1302,
"step": 31
},
{
"epoch": 0.4423963133640553,
"grad_norm": 3.079514980316162,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.1075,
"step": 32
},
{
"epoch": 0.45622119815668205,
"grad_norm": 2.355943441390991,
"learning_rate": 1.6500000000000003e-06,
"loss": 0.0998,
"step": 33
},
{
"epoch": 0.4700460829493088,
"grad_norm": 1.9480725526809692,
"learning_rate": 1.7000000000000002e-06,
"loss": 0.0926,
"step": 34
},
{
"epoch": 0.4838709677419355,
"grad_norm": 1.8598166704177856,
"learning_rate": 1.75e-06,
"loss": 0.0733,
"step": 35
},
{
"epoch": 0.4976958525345622,
"grad_norm": 0.9892730712890625,
"learning_rate": 1.8000000000000001e-06,
"loss": 0.0664,
"step": 36
},
{
"epoch": 0.511520737327189,
"grad_norm": 0.8992418050765991,
"learning_rate": 1.85e-06,
"loss": 0.0709,
"step": 37
},
{
"epoch": 0.5253456221198156,
"grad_norm": 0.7340101599693298,
"learning_rate": 1.9000000000000002e-06,
"loss": 0.0535,
"step": 38
},
{
"epoch": 0.5391705069124424,
"grad_norm": 0.7032178044319153,
"learning_rate": 1.9500000000000004e-06,
"loss": 0.0573,
"step": 39
},
{
"epoch": 0.5529953917050692,
"grad_norm": 0.6449429392814636,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.0576,
"step": 40
},
{
"epoch": 0.5668202764976958,
"grad_norm": 0.6358592510223389,
"learning_rate": 2.05e-06,
"loss": 0.0502,
"step": 41
},
{
"epoch": 0.5806451612903226,
"grad_norm": 0.572036623954773,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.0556,
"step": 42
},
{
"epoch": 0.5944700460829493,
"grad_norm": 0.6538863778114319,
"learning_rate": 2.15e-06,
"loss": 0.0556,
"step": 43
},
{
"epoch": 0.6082949308755761,
"grad_norm": 0.3532159626483917,
"learning_rate": 2.2e-06,
"loss": 0.0452,
"step": 44
},
{
"epoch": 0.6221198156682027,
"grad_norm": 0.4853012263774872,
"learning_rate": 2.25e-06,
"loss": 0.0471,
"step": 45
},
{
"epoch": 0.6359447004608295,
"grad_norm": 0.4761648178100586,
"learning_rate": 2.3000000000000004e-06,
"loss": 0.0469,
"step": 46
},
{
"epoch": 0.6497695852534562,
"grad_norm": 0.6094638109207153,
"learning_rate": 2.35e-06,
"loss": 0.047,
"step": 47
},
{
"epoch": 0.663594470046083,
"grad_norm": 0.5211306214332581,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.0402,
"step": 48
},
{
"epoch": 0.6774193548387096,
"grad_norm": 0.2997778356075287,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.0425,
"step": 49
},
{
"epoch": 0.6912442396313364,
"grad_norm": 0.37834689021110535,
"learning_rate": 2.5e-06,
"loss": 0.0446,
"step": 50
},
{
"epoch": 0.7050691244239631,
"grad_norm": 0.31011995673179626,
"learning_rate": 2.55e-06,
"loss": 0.0406,
"step": 51
},
{
"epoch": 0.7188940092165899,
"grad_norm": 0.3113131523132324,
"learning_rate": 2.6e-06,
"loss": 0.0368,
"step": 52
},
{
"epoch": 0.7327188940092166,
"grad_norm": 0.5685846209526062,
"learning_rate": 2.6500000000000005e-06,
"loss": 0.0389,
"step": 53
},
{
"epoch": 0.7465437788018433,
"grad_norm": 0.29334983229637146,
"learning_rate": 2.7000000000000004e-06,
"loss": 0.0423,
"step": 54
},
{
"epoch": 0.7603686635944701,
"grad_norm": 0.5776861906051636,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.0399,
"step": 55
},
{
"epoch": 0.7741935483870968,
"grad_norm": 0.35423165559768677,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.0357,
"step": 56
},
{
"epoch": 0.7880184331797235,
"grad_norm": 0.37902742624282837,
"learning_rate": 2.85e-06,
"loss": 0.0407,
"step": 57
},
{
"epoch": 0.8018433179723502,
"grad_norm": 0.26948878169059753,
"learning_rate": 2.9e-06,
"loss": 0.0351,
"step": 58
},
{
"epoch": 0.815668202764977,
"grad_norm": 0.35688117146492004,
"learning_rate": 2.95e-06,
"loss": 0.0377,
"step": 59
},
{
"epoch": 0.8294930875576036,
"grad_norm": 0.5287911891937256,
"learning_rate": 3e-06,
"loss": 0.0377,
"step": 60
},
{
"epoch": 0.8433179723502304,
"grad_norm": 0.2950785756111145,
"learning_rate": 3.05e-06,
"loss": 0.0361,
"step": 61
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.2789723575115204,
"learning_rate": 3.1000000000000004e-06,
"loss": 0.032,
"step": 62
},
{
"epoch": 0.8709677419354839,
"grad_norm": 0.2802198529243469,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.0394,
"step": 63
},
{
"epoch": 0.8847926267281107,
"grad_norm": 0.286981463432312,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.033,
"step": 64
},
{
"epoch": 0.8986175115207373,
"grad_norm": 0.37392762303352356,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.0335,
"step": 65
},
{
"epoch": 0.9124423963133641,
"grad_norm": 0.25025588274002075,
"learning_rate": 3.3000000000000006e-06,
"loss": 0.0311,
"step": 66
},
{
"epoch": 0.9262672811059908,
"grad_norm": 0.4292861521244049,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.0362,
"step": 67
},
{
"epoch": 0.9400921658986175,
"grad_norm": 0.4717651307582855,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.0303,
"step": 68
},
{
"epoch": 0.9539170506912442,
"grad_norm": 0.49291253089904785,
"learning_rate": 3.45e-06,
"loss": 0.0352,
"step": 69
},
{
"epoch": 0.967741935483871,
"grad_norm": 0.3729935586452484,
"learning_rate": 3.5e-06,
"loss": 0.0297,
"step": 70
},
{
"epoch": 0.9815668202764977,
"grad_norm": 0.27150583267211914,
"learning_rate": 3.5500000000000003e-06,
"loss": 0.0326,
"step": 71
},
{
"epoch": 0.9953917050691244,
"grad_norm": 0.34516096115112305,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.0336,
"step": 72
},
{
"epoch": 1.0,
"grad_norm": 0.34516096115112305,
"learning_rate": 3.65e-06,
"loss": 0.0274,
"step": 73
},
{
"epoch": 1.0138248847926268,
"grad_norm": 0.6282734870910645,
"learning_rate": 3.7e-06,
"loss": 0.0289,
"step": 74
},
{
"epoch": 1.0276497695852536,
"grad_norm": 0.2935558557510376,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.0308,
"step": 75
},
{
"epoch": 1.0414746543778801,
"grad_norm": 0.3166769742965698,
"learning_rate": 3.8000000000000005e-06,
"loss": 0.0277,
"step": 76
},
{
"epoch": 1.055299539170507,
"grad_norm": 0.38190239667892456,
"learning_rate": 3.85e-06,
"loss": 0.0338,
"step": 77
},
{
"epoch": 1.0691244239631337,
"grad_norm": 0.2779421806335449,
"learning_rate": 3.900000000000001e-06,
"loss": 0.03,
"step": 78
},
{
"epoch": 1.0829493087557605,
"grad_norm": 0.4055996537208557,
"learning_rate": 3.95e-06,
"loss": 0.0295,
"step": 79
},
{
"epoch": 1.096774193548387,
"grad_norm": 0.2987312972545624,
"learning_rate": 4.000000000000001e-06,
"loss": 0.028,
"step": 80
},
{
"epoch": 1.1105990783410138,
"grad_norm": 0.2674776017665863,
"learning_rate": 4.05e-06,
"loss": 0.0243,
"step": 81
},
{
"epoch": 1.1244239631336406,
"grad_norm": 0.29042816162109375,
"learning_rate": 4.1e-06,
"loss": 0.0318,
"step": 82
},
{
"epoch": 1.1382488479262673,
"grad_norm": 0.2904883027076721,
"learning_rate": 4.15e-06,
"loss": 0.0257,
"step": 83
},
{
"epoch": 1.1520737327188941,
"grad_norm": 0.30603015422821045,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.0284,
"step": 84
},
{
"epoch": 1.1658986175115207,
"grad_norm": 0.23131045699119568,
"learning_rate": 4.25e-06,
"loss": 0.0285,
"step": 85
},
{
"epoch": 1.1797235023041475,
"grad_norm": 0.26788002252578735,
"learning_rate": 4.3e-06,
"loss": 0.0269,
"step": 86
},
{
"epoch": 1.1935483870967742,
"grad_norm": 0.2639651894569397,
"learning_rate": 4.350000000000001e-06,
"loss": 0.0289,
"step": 87
},
{
"epoch": 1.2073732718894008,
"grad_norm": 0.25068584084510803,
"learning_rate": 4.4e-06,
"loss": 0.0275,
"step": 88
},
{
"epoch": 1.2211981566820276,
"grad_norm": 0.25494542717933655,
"learning_rate": 4.450000000000001e-06,
"loss": 0.0275,
"step": 89
},
{
"epoch": 1.2350230414746544,
"grad_norm": 0.31125035881996155,
"learning_rate": 4.5e-06,
"loss": 0.0251,
"step": 90
},
{
"epoch": 1.2488479262672811,
"grad_norm": 0.2691773474216461,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.0267,
"step": 91
},
{
"epoch": 1.262672811059908,
"grad_norm": 0.20079147815704346,
"learning_rate": 4.600000000000001e-06,
"loss": 0.0263,
"step": 92
},
{
"epoch": 1.2764976958525347,
"grad_norm": 0.28027331829071045,
"learning_rate": 4.65e-06,
"loss": 0.0227,
"step": 93
},
{
"epoch": 1.2903225806451613,
"grad_norm": 0.40053099393844604,
"learning_rate": 4.7e-06,
"loss": 0.0246,
"step": 94
},
{
"epoch": 1.304147465437788,
"grad_norm": 0.33066362142562866,
"learning_rate": 4.75e-06,
"loss": 0.0221,
"step": 95
},
{
"epoch": 1.3179723502304148,
"grad_norm": 0.2531339228153229,
"learning_rate": 4.800000000000001e-06,
"loss": 0.0216,
"step": 96
},
{
"epoch": 1.3317972350230414,
"grad_norm": 0.37544378638267517,
"learning_rate": 4.85e-06,
"loss": 0.0247,
"step": 97
},
{
"epoch": 1.3456221198156681,
"grad_norm": 0.34273672103881836,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.0217,
"step": 98
},
{
"epoch": 1.359447004608295,
"grad_norm": 0.2338661253452301,
"learning_rate": 4.95e-06,
"loss": 0.0237,
"step": 99
},
{
"epoch": 1.3732718894009217,
"grad_norm": 0.30151981115341187,
"learning_rate": 5e-06,
"loss": 0.0248,
"step": 100
},
{
"epoch": 1.3870967741935485,
"grad_norm": 0.3205336630344391,
"learning_rate": 4.999888074163108e-06,
"loss": 0.0232,
"step": 101
},
{
"epoch": 1.400921658986175,
"grad_norm": 0.2705315351486206,
"learning_rate": 4.999552306674345e-06,
"loss": 0.0245,
"step": 102
},
{
"epoch": 1.4147465437788018,
"grad_norm": 0.2564137578010559,
"learning_rate": 4.998992727598557e-06,
"loss": 0.0274,
"step": 103
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.1967611312866211,
"learning_rate": 4.998209387040829e-06,
"loss": 0.0173,
"step": 104
},
{
"epoch": 1.4423963133640554,
"grad_norm": 0.2568240761756897,
"learning_rate": 4.9972023551419995e-06,
"loss": 0.0223,
"step": 105
},
{
"epoch": 1.456221198156682,
"grad_norm": 0.2236352413892746,
"learning_rate": 4.995971722072379e-06,
"loss": 0.0202,
"step": 106
},
{
"epoch": 1.4700460829493087,
"grad_norm": 0.3389627933502197,
"learning_rate": 4.9945175980236745e-06,
"loss": 0.0214,
"step": 107
},
{
"epoch": 1.4838709677419355,
"grad_norm": 0.31428012251853943,
"learning_rate": 4.992840113199131e-06,
"loss": 0.0188,
"step": 108
},
{
"epoch": 1.4976958525345623,
"grad_norm": 0.41508516669273376,
"learning_rate": 4.990939417801859e-06,
"loss": 0.0213,
"step": 109
},
{
"epoch": 1.511520737327189,
"grad_norm": 0.19615545868873596,
"learning_rate": 4.988815682021398e-06,
"loss": 0.0191,
"step": 110
},
{
"epoch": 1.5253456221198156,
"grad_norm": 0.2059931755065918,
"learning_rate": 4.986469096018472e-06,
"loss": 0.0208,
"step": 111
},
{
"epoch": 1.5391705069124424,
"grad_norm": 0.26946336030960083,
"learning_rate": 4.983899869907963e-06,
"loss": 0.0192,
"step": 112
},
{
"epoch": 1.5529953917050692,
"grad_norm": 0.3227538466453552,
"learning_rate": 4.981108233740096e-06,
"loss": 0.0169,
"step": 113
},
{
"epoch": 1.5668202764976957,
"grad_norm": 0.2811918258666992,
"learning_rate": 4.978094437479843e-06,
"loss": 0.0151,
"step": 114
},
{
"epoch": 1.5806451612903225,
"grad_norm": 0.32980477809906006,
"learning_rate": 4.97485875098454e-06,
"loss": 0.0182,
"step": 115
},
{
"epoch": 1.5944700460829493,
"grad_norm": 0.2759259045124054,
"learning_rate": 4.971401463979722e-06,
"loss": 0.0192,
"step": 116
},
{
"epoch": 1.608294930875576,
"grad_norm": 0.2572178840637207,
"learning_rate": 4.967722886033181e-06,
"loss": 0.0198,
"step": 117
},
{
"epoch": 1.6221198156682028,
"grad_norm": 0.3238658905029297,
"learning_rate": 4.963823346527249e-06,
"loss": 0.0186,
"step": 118
},
{
"epoch": 1.6359447004608296,
"grad_norm": 0.3834918737411499,
"learning_rate": 4.959703194629304e-06,
"loss": 0.0188,
"step": 119
},
{
"epoch": 1.6497695852534562,
"grad_norm": 0.23881244659423828,
"learning_rate": 4.955362799260507e-06,
"loss": 0.0182,
"step": 120
},
{
"epoch": 1.663594470046083,
"grad_norm": 0.1885918825864792,
"learning_rate": 4.950802549062764e-06,
"loss": 0.0183,
"step": 121
},
{
"epoch": 1.6774193548387095,
"grad_norm": 0.34959614276885986,
"learning_rate": 4.946022852363932e-06,
"loss": 0.0173,
"step": 122
},
{
"epoch": 1.6912442396313363,
"grad_norm": 0.22990310192108154,
"learning_rate": 4.9410241371412525e-06,
"loss": 0.0135,
"step": 123
},
{
"epoch": 1.705069124423963,
"grad_norm": 0.2790350615978241,
"learning_rate": 4.935806850983034e-06,
"loss": 0.0159,
"step": 124
},
{
"epoch": 1.7188940092165899,
"grad_norm": 0.3218020796775818,
"learning_rate": 4.9303714610485705e-06,
"loss": 0.0176,
"step": 125
},
{
"epoch": 1.7327188940092166,
"grad_norm": 0.2294609695672989,
"learning_rate": 4.924718454026318e-06,
"loss": 0.0149,
"step": 126
},
{
"epoch": 1.7465437788018434,
"grad_norm": 0.3427927494049072,
"learning_rate": 4.918848336090309e-06,
"loss": 0.0165,
"step": 127
},
{
"epoch": 1.7603686635944702,
"grad_norm": 0.22731825709342957,
"learning_rate": 4.912761632854834e-06,
"loss": 0.0145,
"step": 128
},
{
"epoch": 1.7741935483870968,
"grad_norm": 0.35364386439323425,
"learning_rate": 4.906458889327375e-06,
"loss": 0.0161,
"step": 129
},
{
"epoch": 1.7880184331797235,
"grad_norm": 0.29476454854011536,
"learning_rate": 4.899940669859807e-06,
"loss": 0.0154,
"step": 130
},
{
"epoch": 1.80184331797235,
"grad_norm": 0.28667864203453064,
"learning_rate": 4.893207558097867e-06,
"loss": 0.0143,
"step": 131
},
{
"epoch": 1.8156682027649769,
"grad_norm": 0.2731999158859253,
"learning_rate": 4.8862601569288885e-06,
"loss": 0.0141,
"step": 132
},
{
"epoch": 1.8294930875576036,
"grad_norm": 0.2670470178127289,
"learning_rate": 4.879099088427824e-06,
"loss": 0.0131,
"step": 133
},
{
"epoch": 1.8433179723502304,
"grad_norm": 0.23313525319099426,
"learning_rate": 4.871724993801541e-06,
"loss": 0.012,
"step": 134
},
{
"epoch": 1.8571428571428572,
"grad_norm": 0.2192607820034027,
"learning_rate": 4.864138533331411e-06,
"loss": 0.0125,
"step": 135
},
{
"epoch": 1.870967741935484,
"grad_norm": 0.26603585481643677,
"learning_rate": 4.8563403863141825e-06,
"loss": 0.0121,
"step": 136
},
{
"epoch": 1.8847926267281108,
"grad_norm": 0.32500001788139343,
"learning_rate": 4.84833125100116e-06,
"loss": 0.0116,
"step": 137
},
{
"epoch": 1.8986175115207373,
"grad_norm": 0.24893291294574738,
"learning_rate": 4.840111844535682e-06,
"loss": 0.0119,
"step": 138
},
{
"epoch": 1.912442396313364,
"grad_norm": 0.17670764029026031,
"learning_rate": 4.8316829028889076e-06,
"loss": 0.0096,
"step": 139
},
{
"epoch": 1.9262672811059907,
"grad_norm": 0.16747575998306274,
"learning_rate": 4.823045180793914e-06,
"loss": 0.0113,
"step": 140
},
{
"epoch": 1.9400921658986174,
"grad_norm": 0.19587458670139313,
"learning_rate": 4.8141994516781196e-06,
"loss": 0.0111,
"step": 141
},
{
"epoch": 1.9539170506912442,
"grad_norm": 0.237543985247612,
"learning_rate": 4.805146507594034e-06,
"loss": 0.0088,
"step": 142
},
{
"epoch": 1.967741935483871,
"grad_norm": 0.22710399329662323,
"learning_rate": 4.7958871591483305e-06,
"loss": 0.0085,
"step": 143
},
{
"epoch": 1.9815668202764978,
"grad_norm": 0.2946629822254181,
"learning_rate": 4.786422235429269e-06,
"loss": 0.0122,
"step": 144
},
{
"epoch": 1.9953917050691246,
"grad_norm": 0.2763853371143341,
"learning_rate": 4.776752583932455e-06,
"loss": 0.0118,
"step": 145
},
{
"epoch": 2.0,
"grad_norm": 0.2763853371143341,
"learning_rate": 4.766879070484957e-06,
"loss": 0.0078,
"step": 146
},
{
"epoch": 2.013824884792627,
"grad_norm": 0.2722196877002716,
"learning_rate": 4.756802579167781e-06,
"loss": 0.0076,
"step": 147
},
{
"epoch": 2.0276497695852536,
"grad_norm": 0.18556565046310425,
"learning_rate": 4.746524012236706e-06,
"loss": 0.0091,
"step": 148
},
{
"epoch": 2.0414746543778803,
"grad_norm": 0.24442361295223236,
"learning_rate": 4.736044290041496e-06,
"loss": 0.009,
"step": 149
},
{
"epoch": 2.055299539170507,
"grad_norm": 0.24207571148872375,
"learning_rate": 4.725364350943492e-06,
"loss": 0.0085,
"step": 150
},
{
"epoch": 2.0691244239631335,
"grad_norm": 0.18502290546894073,
"learning_rate": 4.714485151231593e-06,
"loss": 0.0059,
"step": 151
},
{
"epoch": 2.0829493087557602,
"grad_norm": 0.3010450303554535,
"learning_rate": 4.703407665036622e-06,
"loss": 0.0071,
"step": 152
},
{
"epoch": 2.096774193548387,
"grad_norm": 0.23272967338562012,
"learning_rate": 4.692132884244113e-06,
"loss": 0.0074,
"step": 153
},
{
"epoch": 2.110599078341014,
"grad_norm": 0.25476181507110596,
"learning_rate": 4.680661818405485e-06,
"loss": 0.0082,
"step": 154
},
{
"epoch": 2.1244239631336406,
"grad_norm": 0.24534538388252258,
"learning_rate": 4.668995494647653e-06,
"loss": 0.0065,
"step": 155
},
{
"epoch": 2.1382488479262673,
"grad_norm": 0.1642732173204422,
"learning_rate": 4.657134957581057e-06,
"loss": 0.0054,
"step": 156
},
{
"epoch": 2.152073732718894,
"grad_norm": 0.21100501716136932,
"learning_rate": 4.645081269206128e-06,
"loss": 0.0091,
"step": 157
},
{
"epoch": 2.165898617511521,
"grad_norm": 0.19043587148189545,
"learning_rate": 4.632835508818192e-06,
"loss": 0.0047,
"step": 158
},
{
"epoch": 2.1797235023041477,
"grad_norm": 0.1804375797510147,
"learning_rate": 4.620398772910833e-06,
"loss": 0.0068,
"step": 159
},
{
"epoch": 2.193548387096774,
"grad_norm": 0.6586657762527466,
"learning_rate": 4.607772175077712e-06,
"loss": 0.0049,
"step": 160
},
{
"epoch": 2.207373271889401,
"grad_norm": 0.18181656301021576,
"learning_rate": 4.59495684591285e-06,
"loss": 0.0071,
"step": 161
},
{
"epoch": 2.2211981566820276,
"grad_norm": 0.760053813457489,
"learning_rate": 4.581953932909403e-06,
"loss": 0.0065,
"step": 162
},
{
"epoch": 2.2350230414746544,
"grad_norm": 0.1935238242149353,
"learning_rate": 4.5687646003569055e-06,
"loss": 0.0066,
"step": 163
},
{
"epoch": 2.248847926267281,
"grad_norm": 0.3035024404525757,
"learning_rate": 4.555390029237026e-06,
"loss": 0.0046,
"step": 164
},
{
"epoch": 2.262672811059908,
"grad_norm": 0.16596420109272003,
"learning_rate": 4.541831417117815e-06,
"loss": 0.007,
"step": 165
},
{
"epoch": 2.2764976958525347,
"grad_norm": 0.2578873336315155,
"learning_rate": 4.528089978046481e-06,
"loss": 0.0048,
"step": 166
},
{
"epoch": 2.2903225806451615,
"grad_norm": 1.7751781940460205,
"learning_rate": 4.514166942440679e-06,
"loss": 0.0041,
"step": 167
},
{
"epoch": 2.3041474654377883,
"grad_norm": 0.37872445583343506,
"learning_rate": 4.5000635569783365e-06,
"loss": 0.0045,
"step": 168
},
{
"epoch": 2.3179723502304146,
"grad_norm": 0.22949594259262085,
"learning_rate": 4.4857810844860325e-06,
"loss": 0.0071,
"step": 169
},
{
"epoch": 2.3317972350230414,
"grad_norm": 0.34662699699401855,
"learning_rate": 4.471320803825915e-06,
"loss": 0.006,
"step": 170
},
{
"epoch": 2.345622119815668,
"grad_norm": 0.5892661213874817,
"learning_rate": 4.4566840097811956e-06,
"loss": 0.0055,
"step": 171
},
{
"epoch": 2.359447004608295,
"grad_norm": 0.18866907060146332,
"learning_rate": 4.4418720129402145e-06,
"loss": 0.0036,
"step": 172
},
{
"epoch": 2.3732718894009217,
"grad_norm": 0.1510942429304123,
"learning_rate": 4.426886139579083e-06,
"loss": 0.0065,
"step": 173
},
{
"epoch": 2.3870967741935485,
"grad_norm": 0.21291828155517578,
"learning_rate": 4.411727731542937e-06,
"loss": 0.004,
"step": 174
},
{
"epoch": 2.4009216589861753,
"grad_norm": 0.18649035692214966,
"learning_rate": 4.39639814612578e-06,
"loss": 0.0047,
"step": 175
},
{
"epoch": 2.4147465437788016,
"grad_norm": 0.19008278846740723,
"learning_rate": 4.3808987559489536e-06,
"loss": 0.0071,
"step": 176
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.26282456517219543,
"learning_rate": 4.365230948838232e-06,
"loss": 0.0044,
"step": 177
},
{
"epoch": 2.442396313364055,
"grad_norm": 0.2351403385400772,
"learning_rate": 4.349396127699552e-06,
"loss": 0.0057,
"step": 178
},
{
"epoch": 2.456221198156682,
"grad_norm": 0.20451441407203674,
"learning_rate": 4.3333957103934025e-06,
"loss": 0.003,
"step": 179
},
{
"epoch": 2.4700460829493087,
"grad_norm": 0.22120380401611328,
"learning_rate": 4.317231129607859e-06,
"loss": 0.0045,
"step": 180
},
{
"epoch": 2.4838709677419355,
"grad_norm": 0.18543967604637146,
"learning_rate": 4.30090383273031e-06,
"loss": 0.0062,
"step": 181
},
{
"epoch": 2.4976958525345623,
"grad_norm": 0.18473730981349945,
"learning_rate": 4.2844152817178476e-06,
"loss": 0.0052,
"step": 182
},
{
"epoch": 2.511520737327189,
"grad_norm": 0.21087361872196198,
"learning_rate": 4.267766952966369e-06,
"loss": 0.0041,
"step": 183
},
{
"epoch": 2.525345622119816,
"grad_norm": 0.24977360665798187,
"learning_rate": 4.2509603371783776e-06,
"loss": 0.0053,
"step": 184
},
{
"epoch": 2.539170506912442,
"grad_norm": 0.19377018511295319,
"learning_rate": 4.233996939229502e-06,
"loss": 0.0037,
"step": 185
},
{
"epoch": 2.5529953917050694,
"grad_norm": 0.21130548417568207,
"learning_rate": 4.216878278033753e-06,
"loss": 0.0031,
"step": 186
},
{
"epoch": 2.5668202764976957,
"grad_norm": 0.13288047909736633,
"learning_rate": 4.199605886407515e-06,
"loss": 0.0029,
"step": 187
},
{
"epoch": 2.5806451612903225,
"grad_norm": 0.15998876094818115,
"learning_rate": 4.1821813109322975e-06,
"loss": 0.0031,
"step": 188
},
{
"epoch": 2.5944700460829493,
"grad_norm": 0.19475506246089935,
"learning_rate": 4.164606111816256e-06,
"loss": 0.0022,
"step": 189
},
{
"epoch": 2.608294930875576,
"grad_norm": 0.1446300446987152,
"learning_rate": 4.146881862754485e-06,
"loss": 0.0025,
"step": 190
},
{
"epoch": 2.622119815668203,
"grad_norm": 0.13051164150238037,
"learning_rate": 4.129010150788112e-06,
"loss": 0.0019,
"step": 191
},
{
"epoch": 2.6359447004608296,
"grad_norm": 0.1953984946012497,
"learning_rate": 4.110992576162193e-06,
"loss": 0.0021,
"step": 192
},
{
"epoch": 2.6497695852534564,
"grad_norm": 0.23630598187446594,
"learning_rate": 4.092830752182423e-06,
"loss": 0.002,
"step": 193
},
{
"epoch": 2.6635944700460827,
"grad_norm": 0.2919062376022339,
"learning_rate": 4.074526305070679e-06,
"loss": 0.0017,
"step": 194
},
{
"epoch": 2.6774193548387095,
"grad_norm": 0.22015534341335297,
"learning_rate": 4.056080873819412e-06,
"loss": 0.0025,
"step": 195
},
{
"epoch": 2.6912442396313363,
"grad_norm": 0.9449160099029541,
"learning_rate": 4.037496110044885e-06,
"loss": 0.0024,
"step": 196
},
{
"epoch": 2.705069124423963,
"grad_norm": 0.25235581398010254,
"learning_rate": 4.018773677839289e-06,
"loss": 0.0031,
"step": 197
},
{
"epoch": 2.71889400921659,
"grad_norm": 0.3098089098930359,
"learning_rate": 3.999915253621739e-06,
"loss": 0.0019,
"step": 198
},
{
"epoch": 2.7327188940092166,
"grad_norm": 0.19896291196346283,
"learning_rate": 3.980922525988167e-06,
"loss": 0.0019,
"step": 199
},
{
"epoch": 2.7465437788018434,
"grad_norm": 0.21136268973350525,
"learning_rate": 3.961797195560118e-06,
"loss": 0.0031,
"step": 200
},
{
"epoch": 2.76036866359447,
"grad_norm": 0.2549005150794983,
"learning_rate": 3.942540974832486e-06,
"loss": 0.0017,
"step": 201
},
{
"epoch": 2.774193548387097,
"grad_norm": 0.14762410521507263,
"learning_rate": 3.9231555880201655e-06,
"loss": 0.0022,
"step": 202
},
{
"epoch": 2.7880184331797233,
"grad_norm": 0.16235944628715515,
"learning_rate": 3.903642770903671e-06,
"loss": 0.0012,
"step": 203
},
{
"epoch": 2.80184331797235,
"grad_norm": 0.1506718099117279,
"learning_rate": 3.884004270673711e-06,
"loss": 0.0015,
"step": 204
},
{
"epoch": 2.815668202764977,
"grad_norm": 0.10484135895967484,
"learning_rate": 3.864241845774746e-06,
"loss": 0.0016,
"step": 205
},
{
"epoch": 2.8294930875576036,
"grad_norm": 0.7636306285858154,
"learning_rate": 3.844357265747531e-06,
"loss": 0.0018,
"step": 206
},
{
"epoch": 2.8433179723502304,
"grad_norm": 0.2242082804441452,
"learning_rate": 3.8243523110706736e-06,
"loss": 0.0021,
"step": 207
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.3264133334159851,
"learning_rate": 3.8042287730012117e-06,
"loss": 0.0021,
"step": 208
},
{
"epoch": 2.870967741935484,
"grad_norm": 0.12472204118967056,
"learning_rate": 3.7839884534142157e-06,
"loss": 0.0011,
"step": 209
},
{
"epoch": 2.8847926267281108,
"grad_norm": 0.07526414096355438,
"learning_rate": 3.7636331646414524e-06,
"loss": 0.0017,
"step": 210
},
{
"epoch": 2.8986175115207375,
"grad_norm": 0.16134843230247498,
"learning_rate": 3.7431647293091076e-06,
"loss": 0.0019,
"step": 211
},
{
"epoch": 2.912442396313364,
"grad_norm": 0.14789307117462158,
"learning_rate": 3.7225849801745835e-06,
"loss": 0.0012,
"step": 212
},
{
"epoch": 2.9262672811059907,
"grad_norm": 0.13681238889694214,
"learning_rate": 3.701895759962397e-06,
"loss": 0.0011,
"step": 213
},
{
"epoch": 2.9400921658986174,
"grad_norm": 0.10747735947370529,
"learning_rate": 3.6810989211991777e-06,
"loss": 0.0007,
"step": 214
},
{
"epoch": 2.953917050691244,
"grad_norm": 0.08121375739574432,
"learning_rate": 3.6601963260477923e-06,
"loss": 0.0005,
"step": 215
},
{
"epoch": 2.967741935483871,
"grad_norm": 0.0884300246834755,
"learning_rate": 3.6391898461406045e-06,
"loss": 0.0014,
"step": 216
},
{
"epoch": 2.9815668202764978,
"grad_norm": 0.18539245426654816,
"learning_rate": 3.6180813624118898e-06,
"loss": 0.002,
"step": 217
},
{
"epoch": 2.9953917050691246,
"grad_norm": 0.1257522702217102,
"learning_rate": 3.5968727649294134e-06,
"loss": 0.0015,
"step": 218
},
{
"epoch": 3.0,
"grad_norm": 1.2422555685043335,
"learning_rate": 3.575565952725193e-06,
"loss": 0.0002,
"step": 219
},
{
"epoch": 3.013824884792627,
"grad_norm": 0.06009506434202194,
"learning_rate": 3.55416283362546e-06,
"loss": 0.0003,
"step": 220
},
{
"epoch": 3.0276497695852536,
"grad_norm": 0.0876953974366188,
"learning_rate": 3.5326653240798283e-06,
"loss": 0.0005,
"step": 221
},
{
"epoch": 3.0414746543778803,
"grad_norm": 0.7512914538383484,
"learning_rate": 3.5110753489896924e-06,
"loss": 0.0007,
"step": 222
},
{
"epoch": 3.055299539170507,
"grad_norm": 0.08451899141073227,
"learning_rate": 3.4893948415358803e-06,
"loss": 0.0009,
"step": 223
},
{
"epoch": 3.0691244239631335,
"grad_norm": 0.15445305407047272,
"learning_rate": 3.4676257430055438e-06,
"loss": 0.0006,
"step": 224
},
{
"epoch": 3.0829493087557602,
"grad_norm": 0.07909094542264938,
"learning_rate": 3.4457700026183378e-06,
"loss": 0.0004,
"step": 225
},
{
"epoch": 3.096774193548387,
"grad_norm": 0.03637247905135155,
"learning_rate": 3.4238295773518924e-06,
"loss": 0.0003,
"step": 226
},
{
"epoch": 3.110599078341014,
"grad_norm": 0.203308567404747,
"learning_rate": 3.4018064317665745e-06,
"loss": 0.0003,
"step": 227
},
{
"epoch": 3.1244239631336406,
"grad_norm": 0.03239201754331589,
"learning_rate": 3.3797025378295826e-06,
"loss": 0.0002,
"step": 228
},
{
"epoch": 3.1382488479262673,
"grad_norm": 0.07106538861989975,
"learning_rate": 3.357519874738382e-06,
"loss": 0.0004,
"step": 229
},
{
"epoch": 3.152073732718894,
"grad_norm": 0.048268985003232956,
"learning_rate": 3.3352604287434752e-06,
"loss": 0.0003,
"step": 230
},
{
"epoch": 3.165898617511521,
"grad_norm": 0.0841558575630188,
"learning_rate": 3.31292619297056e-06,
"loss": 0.0003,
"step": 231
},
{
"epoch": 3.1797235023041477,
"grad_norm": 0.07029678672552109,
"learning_rate": 3.29051916724206e-06,
"loss": 0.0003,
"step": 232
},
{
"epoch": 3.193548387096774,
"grad_norm": 0.11369964480400085,
"learning_rate": 3.2680413578980623e-06,
"loss": 0.0014,
"step": 233
},
{
"epoch": 3.207373271889401,
"grad_norm": 0.0367964468896389,
"learning_rate": 3.245494777616664e-06,
"loss": 0.0001,
"step": 234
},
{
"epoch": 3.2211981566820276,
"grad_norm": 0.13746097683906555,
"learning_rate": 3.2228814452337587e-06,
"loss": 0.0003,
"step": 235
},
{
"epoch": 3.2350230414746544,
"grad_norm": 0.09046189486980438,
"learning_rate": 3.2002033855622683e-06,
"loss": 0.0004,
"step": 236
},
{
"epoch": 3.248847926267281,
"grad_norm": 0.04587667062878609,
"learning_rate": 3.177462629210838e-06,
"loss": 0.0002,
"step": 237
},
{
"epoch": 3.262672811059908,
"grad_norm": 0.11323168128728867,
"learning_rate": 3.154661212402017e-06,
"loss": 0.0003,
"step": 238
},
{
"epoch": 3.2764976958525347,
"grad_norm": 0.04728177189826965,
"learning_rate": 3.131801176789934e-06,
"loss": 0.0002,
"step": 239
},
{
"epoch": 3.2903225806451615,
"grad_norm": 0.527999997138977,
"learning_rate": 3.1088845692774798e-06,
"loss": 0.0008,
"step": 240
},
{
"epoch": 3.3041474654377883,
"grad_norm": 0.026646027341485023,
"learning_rate": 3.0859134418330373e-06,
"loss": 0.0001,
"step": 241
},
{
"epoch": 3.3179723502304146,
"grad_norm": 0.057450197637081146,
"learning_rate": 3.0628898513067357e-06,
"loss": 0.0004,
"step": 242
},
{
"epoch": 3.3317972350230414,
"grad_norm": 0.08258494734764099,
"learning_rate": 3.0398158592462847e-06,
"loss": 0.0005,
"step": 243
},
{
"epoch": 3.345622119815668,
"grad_norm": 0.01878846250474453,
"learning_rate": 3.0166935317123824e-06,
"loss": 0.0001,
"step": 244
},
{
"epoch": 3.359447004608295,
"grad_norm": 0.041918545961380005,
"learning_rate": 2.9935249390937184e-06,
"loss": 0.0002,
"step": 245
},
{
"epoch": 3.3732718894009217,
"grad_norm": 0.04018491134047508,
"learning_rate": 2.970312155921585e-06,
"loss": 0.0002,
"step": 246
},
{
"epoch": 3.3870967741935485,
"grad_norm": 0.040825020521879196,
"learning_rate": 2.9470572606841295e-06,
"loss": 0.0002,
"step": 247
},
{
"epoch": 3.4009216589861753,
"grad_norm": 0.050590481609106064,
"learning_rate": 2.9237623356402423e-06,
"loss": 0.0002,
"step": 248
},
{
"epoch": 3.4147465437788016,
"grad_norm": 0.07999978959560394,
"learning_rate": 2.900429466633107e-06,
"loss": 0.0002,
"step": 249
},
{
"epoch": 3.4285714285714284,
"grad_norm": 0.02137935161590576,
"learning_rate": 2.8770607429034352e-06,
"loss": 0.0001,
"step": 250
},
{
"epoch": 3.442396313364055,
"grad_norm": 0.18967340886592865,
"learning_rate": 2.8536582569023964e-06,
"loss": 0.0007,
"step": 251
},
{
"epoch": 3.456221198156682,
"grad_norm": 0.03681226819753647,
"learning_rate": 2.8302241041042564e-06,
"loss": 0.0001,
"step": 252
},
{
"epoch": 3.4700460829493087,
"grad_norm": 0.03142761439085007,
"learning_rate": 2.8067603828187446e-06,
"loss": 0.0001,
"step": 253
},
{
"epoch": 3.4838709677419355,
"grad_norm": 0.11318890005350113,
"learning_rate": 2.7832691940031755e-06,
"loss": 0.0005,
"step": 254
},
{
"epoch": 3.4976958525345623,
"grad_norm": 0.047176819294691086,
"learning_rate": 2.759752641074322e-06,
"loss": 0.0002,
"step": 255
},
{
"epoch": 3.511520737327189,
"grad_norm": 0.0642286092042923,
"learning_rate": 2.7362128297200784e-06,
"loss": 0.0002,
"step": 256
},
{
"epoch": 3.525345622119816,
"grad_norm": 0.09328105300664902,
"learning_rate": 2.712651867710914e-06,
"loss": 0.0004,
"step": 257
},
{
"epoch": 3.539170506912442,
"grad_norm": 0.08150269836187363,
"learning_rate": 2.6890718647111424e-06,
"loss": 0.0007,
"step": 258
},
{
"epoch": 3.5529953917050694,
"grad_norm": 0.03366294875741005,
"learning_rate": 2.665474932090017e-06,
"loss": 0.0001,
"step": 259
},
{
"epoch": 3.5668202764976957,
"grad_norm": 0.032316725701093674,
"learning_rate": 2.6418631827326857e-06,
"loss": 0.0001,
"step": 260
},
{
"epoch": 3.5806451612903225,
"grad_norm": 0.02776617370545864,
"learning_rate": 2.6182387308509927e-06,
"loss": 0.0001,
"step": 261
},
{
"epoch": 3.5944700460829493,
"grad_norm": 0.1258484572172165,
"learning_rate": 2.5946036917941765e-06,
"loss": 0.0003,
"step": 262
},
{
"epoch": 3.608294930875576,
"grad_norm": 0.04412033408880234,
"learning_rate": 2.570960181859458e-06,
"loss": 0.0003,
"step": 263
},
{
"epoch": 3.622119815668203,
"grad_norm": 0.016816483810544014,
"learning_rate": 2.547310318102548e-06,
"loss": 0.0001,
"step": 264
},
{
"epoch": 3.6359447004608296,
"grad_norm": 0.028503524139523506,
"learning_rate": 2.5236562181480794e-06,
"loss": 0.0001,
"step": 265
},
{
"epoch": 3.6497695852534564,
"grad_norm": 0.03991785645484924,
"learning_rate": 2.5e-06,
"loss": 0.0002,
"step": 266
},
{
"epoch": 3.6635944700460827,
"grad_norm": 0.07638856768608093,
"learning_rate": 2.4763437818519205e-06,
"loss": 0.0002,
"step": 267
},
{
"epoch": 3.6774193548387095,
"grad_norm": 0.032387226819992065,
"learning_rate": 2.4526896818974534e-06,
"loss": 0.0002,
"step": 268
},
{
"epoch": 3.6912442396313363,
"grad_norm": 0.035975128412246704,
"learning_rate": 2.429039818140543e-06,
"loss": 0.0002,
"step": 269
},
{
"epoch": 3.705069124423963,
"grad_norm": 0.021173926070332527,
"learning_rate": 2.405396308205825e-06,
"loss": 0.0001,
"step": 270
},
{
"epoch": 3.71889400921659,
"grad_norm": 0.005446314811706543,
"learning_rate": 2.381761269149009e-06,
"loss": 0.0,
"step": 271
},
{
"epoch": 3.7327188940092166,
"grad_norm": 0.04019308090209961,
"learning_rate": 2.358136817267315e-06,
"loss": 0.0001,
"step": 272
},
{
"epoch": 3.7465437788018434,
"grad_norm": 0.0222685057669878,
"learning_rate": 2.334525067909983e-06,
"loss": 0.0001,
"step": 273
},
{
"epoch": 3.76036866359447,
"grad_norm": 0.02486710622906685,
"learning_rate": 2.3109281352888593e-06,
"loss": 0.0001,
"step": 274
},
{
"epoch": 3.774193548387097,
"grad_norm": 0.01929207146167755,
"learning_rate": 2.2873481322890866e-06,
"loss": 0.0001,
"step": 275
},
{
"epoch": 3.7880184331797233,
"grad_norm": 0.010686581023037434,
"learning_rate": 2.263787170279922e-06,
"loss": 0.0,
"step": 276
},
{
"epoch": 3.80184331797235,
"grad_norm": 0.04710806906223297,
"learning_rate": 2.2402473589256793e-06,
"loss": 0.0001,
"step": 277
},
{
"epoch": 3.815668202764977,
"grad_norm": 0.00774085009470582,
"learning_rate": 2.2167308059968258e-06,
"loss": 0.0,
"step": 278
},
{
"epoch": 3.8294930875576036,
"grad_norm": 0.00735470512881875,
"learning_rate": 2.193239617181256e-06,
"loss": 0.0,
"step": 279
},
{
"epoch": 3.8433179723502304,
"grad_norm": 0.005572167690843344,
"learning_rate": 2.169775895895745e-06,
"loss": 0.0,
"step": 280
},
{
"epoch": 3.857142857142857,
"grad_norm": 0.07026448100805283,
"learning_rate": 2.146341743097604e-06,
"loss": 0.0004,
"step": 281
},
{
"epoch": 3.870967741935484,
"grad_norm": 0.03968067839741707,
"learning_rate": 2.1229392570965656e-06,
"loss": 0.0001,
"step": 282
},
{
"epoch": 3.8847926267281108,
"grad_norm": 0.002730958629399538,
"learning_rate": 2.0995705333668948e-06,
"loss": 0.0,
"step": 283
},
{
"epoch": 3.8986175115207375,
"grad_norm": 0.010703709907829762,
"learning_rate": 2.0762376643597586e-06,
"loss": 0.0,
"step": 284
},
{
"epoch": 3.912442396313364,
"grad_norm": 0.03527766093611717,
"learning_rate": 2.0529427393158704e-06,
"loss": 0.0001,
"step": 285
},
{
"epoch": 3.9262672811059907,
"grad_norm": 0.03926033526659012,
"learning_rate": 2.0296878440784164e-06,
"loss": 0.0001,
"step": 286
},
{
"epoch": 3.9400921658986174,
"grad_norm": 0.007335775997489691,
"learning_rate": 2.006475060906283e-06,
"loss": 0.0,
"step": 287
},
{
"epoch": 3.953917050691244,
"grad_norm": 0.005718631204217672,
"learning_rate": 1.9833064682876175e-06,
"loss": 0.0,
"step": 288
},
{
"epoch": 3.967741935483871,
"grad_norm": 0.005941327195614576,
"learning_rate": 1.9601841407537157e-06,
"loss": 0.0,
"step": 289
},
{
"epoch": 3.9815668202764978,
"grad_norm": 0.039281055331230164,
"learning_rate": 1.937110148693265e-06,
"loss": 0.0001,
"step": 290
},
{
"epoch": 3.9953917050691246,
"grad_norm": 0.06976872682571411,
"learning_rate": 1.9140865581669627e-06,
"loss": 0.0001,
"step": 291
},
{
"epoch": 4.0,
"grad_norm": 0.06976872682571411,
"learning_rate": 1.8911154307225204e-06,
"loss": 0.0,
"step": 292
},
{
"epoch": 4.013824884792626,
"grad_norm": 0.005908307619392872,
"learning_rate": 1.8681988232100674e-06,
"loss": 0.0,
"step": 293
},
{
"epoch": 4.027649769585254,
"grad_norm": 0.022486088797450066,
"learning_rate": 1.8453387875979834e-06,
"loss": 0.0001,
"step": 294
},
{
"epoch": 4.04147465437788,
"grad_norm": 0.0074249873869121075,
"learning_rate": 1.822537370789163e-06,
"loss": 0.0,
"step": 295
},
{
"epoch": 4.055299539170507,
"grad_norm": 0.004768090322613716,
"learning_rate": 1.7997966144377328e-06,
"loss": 0.0,
"step": 296
},
{
"epoch": 4.0691244239631335,
"grad_norm": 0.013053408823907375,
"learning_rate": 1.7771185547662417e-06,
"loss": 0.0,
"step": 297
},
{
"epoch": 4.082949308755761,
"grad_norm": 0.00568437110632658,
"learning_rate": 1.754505222383337e-06,
"loss": 0.0,
"step": 298
},
{
"epoch": 4.096774193548387,
"grad_norm": 0.006704168394207954,
"learning_rate": 1.7319586421019383e-06,
"loss": 0.0,
"step": 299
},
{
"epoch": 4.110599078341014,
"grad_norm": 0.0039120810106396675,
"learning_rate": 1.7094808327579401e-06,
"loss": 0.0,
"step": 300
},
{
"epoch": 4.124423963133641,
"grad_norm": 0.009206798858940601,
"learning_rate": 1.6870738070294412e-06,
"loss": 0.0,
"step": 301
},
{
"epoch": 4.138248847926267,
"grad_norm": 0.005304583813995123,
"learning_rate": 1.6647395712565256e-06,
"loss": 0.0,
"step": 302
},
{
"epoch": 4.152073732718894,
"grad_norm": 0.008103611879050732,
"learning_rate": 1.6424801252616186e-06,
"loss": 0.0001,
"step": 303
},
{
"epoch": 4.1658986175115205,
"grad_norm": 0.028891608119010925,
"learning_rate": 1.6202974621704176e-06,
"loss": 0.0,
"step": 304
},
{
"epoch": 4.179723502304148,
"grad_norm": 0.0035763406194746494,
"learning_rate": 1.5981935682334266e-06,
"loss": 0.0,
"step": 305
},
{
"epoch": 4.193548387096774,
"grad_norm": 0.009718772955238819,
"learning_rate": 1.5761704226481078e-06,
"loss": 0.0,
"step": 306
},
{
"epoch": 4.207373271889401,
"grad_norm": 0.01045698020607233,
"learning_rate": 1.5542299973816626e-06,
"loss": 0.0,
"step": 307
},
{
"epoch": 4.221198156682028,
"grad_norm": 0.004575685132294893,
"learning_rate": 1.5323742569944573e-06,
"loss": 0.0,
"step": 308
},
{
"epoch": 4.235023041474655,
"grad_norm": 0.003245371161028743,
"learning_rate": 1.5106051584641208e-06,
"loss": 0.0,
"step": 309
},
{
"epoch": 4.248847926267281,
"grad_norm": 0.005619620904326439,
"learning_rate": 1.4889246510103078e-06,
"loss": 0.0,
"step": 310
},
{
"epoch": 4.2626728110599075,
"grad_norm": 0.004715710878372192,
"learning_rate": 1.4673346759201728e-06,
"loss": 0.0,
"step": 311
},
{
"epoch": 4.276497695852535,
"grad_norm": 0.007476332131773233,
"learning_rate": 1.44583716637454e-06,
"loss": 0.0,
"step": 312
},
{
"epoch": 4.290322580645161,
"grad_norm": 0.01739400625228882,
"learning_rate": 1.4244340472748076e-06,
"loss": 0.0001,
"step": 313
},
{
"epoch": 4.304147465437788,
"grad_norm": 0.00816753227263689,
"learning_rate": 1.403127235070587e-06,
"loss": 0.0,
"step": 314
},
{
"epoch": 4.317972350230415,
"grad_norm": 0.010216044262051582,
"learning_rate": 1.381918637588112e-06,
"loss": 0.0,
"step": 315
},
{
"epoch": 4.331797235023042,
"grad_norm": 0.004990486893802881,
"learning_rate": 1.3608101538593965e-06,
"loss": 0.0,
"step": 316
},
{
"epoch": 4.345622119815668,
"grad_norm": 0.004758649505674839,
"learning_rate": 1.3398036739522088e-06,
"loss": 0.0001,
"step": 317
},
{
"epoch": 4.359447004608295,
"grad_norm": 0.041808340698480606,
"learning_rate": 1.3189010788008234e-06,
"loss": 0.0,
"step": 318
},
{
"epoch": 4.373271889400922,
"grad_norm": 0.012711254879832268,
"learning_rate": 1.2981042400376032e-06,
"loss": 0.0,
"step": 319
},
{
"epoch": 4.387096774193548,
"grad_norm": 0.0035697701387107372,
"learning_rate": 1.277415019825417e-06,
"loss": 0.0,
"step": 320
},
{
"epoch": 4.400921658986175,
"grad_norm": 0.005487007088959217,
"learning_rate": 1.2568352706908937e-06,
"loss": 0.0,
"step": 321
},
{
"epoch": 4.414746543778802,
"grad_norm": 0.01304635126143694,
"learning_rate": 1.2363668353585486e-06,
"loss": 0.0,
"step": 322
},
{
"epoch": 4.428571428571429,
"grad_norm": 0.0019787494093179703,
"learning_rate": 1.216011546585785e-06,
"loss": 0.0,
"step": 323
},
{
"epoch": 4.442396313364055,
"grad_norm": 0.00808583851903677,
"learning_rate": 1.195771226998789e-06,
"loss": 0.0,
"step": 324
},
{
"epoch": 4.456221198156682,
"grad_norm": 0.0022094689775258303,
"learning_rate": 1.1756476889293269e-06,
"loss": 0.0,
"step": 325
},
{
"epoch": 4.470046082949309,
"grad_norm": 0.012792594730854034,
"learning_rate": 1.1556427342524698e-06,
"loss": 0.0,
"step": 326
},
{
"epoch": 4.483870967741936,
"grad_norm": 0.006805351935327053,
"learning_rate": 1.1357581542252555e-06,
"loss": 0.0,
"step": 327
},
{
"epoch": 4.497695852534562,
"grad_norm": 0.003740285988897085,
"learning_rate": 1.1159957293262888e-06,
"loss": 0.0,
"step": 328
},
{
"epoch": 4.511520737327189,
"grad_norm": 0.009705561213195324,
"learning_rate": 1.0963572290963298e-06,
"loss": 0.0,
"step": 329
},
{
"epoch": 4.525345622119816,
"grad_norm": 0.040002401918172836,
"learning_rate": 1.0768444119798357e-06,
"loss": 0.0002,
"step": 330
},
{
"epoch": 4.539170506912442,
"grad_norm": 0.0036789420992136,
"learning_rate": 1.0574590251675145e-06,
"loss": 0.0,
"step": 331
},
{
"epoch": 4.552995391705069,
"grad_norm": 0.004043960478156805,
"learning_rate": 1.0382028044398823e-06,
"loss": 0.0002,
"step": 332
},
{
"epoch": 4.566820276497696,
"grad_norm": 0.0512581467628479,
"learning_rate": 1.0190774740118343e-06,
"loss": 0.0,
"step": 333
},
{
"epoch": 4.580645161290323,
"grad_norm": 0.004926969762891531,
"learning_rate": 1.0000847463782615e-06,
"loss": 0.0,
"step": 334
},
{
"epoch": 4.594470046082949,
"grad_norm": 0.0043294900096952915,
"learning_rate": 9.812263221607114e-07,
"loss": 0.0,
"step": 335
},
{
"epoch": 4.6082949308755765,
"grad_norm": 0.0023195091634988785,
"learning_rate": 9.625038899551162e-07,
"loss": 0.0,
"step": 336
},
{
"epoch": 4.622119815668203,
"grad_norm": 0.0015059575671330094,
"learning_rate": 9.439191261805894e-07,
"loss": 0.0,
"step": 337
},
{
"epoch": 4.635944700460829,
"grad_norm": 0.001368862227536738,
"learning_rate": 9.254736949293216e-07,
"loss": 0.0,
"step": 338
},
{
"epoch": 4.649769585253456,
"grad_norm": 0.008128674700856209,
"learning_rate": 9.07169247817579e-07,
"loss": 0.0,
"step": 339
},
{
"epoch": 4.663594470046083,
"grad_norm": 0.0029226879123598337,
"learning_rate": 8.890074238378074e-07,
"loss": 0.0,
"step": 340
},
{
"epoch": 4.67741935483871,
"grad_norm": 0.0012331035686656833,
"learning_rate": 8.709898492118885e-07,
"loss": 0.0,
"step": 341
},
{
"epoch": 4.691244239631336,
"grad_norm": 0.005286338273435831,
"learning_rate": 8.531181372455161e-07,
"loss": 0.0,
"step": 342
},
{
"epoch": 4.705069124423963,
"grad_norm": 0.0026836844626814127,
"learning_rate": 8.353938881837445e-07,
"loss": 0.0,
"step": 343
},
{
"epoch": 4.71889400921659,
"grad_norm": 0.013100259937345982,
"learning_rate": 8.178186890677029e-07,
"loss": 0.0,
"step": 344
},
{
"epoch": 4.732718894009217,
"grad_norm": 0.005650435108691454,
"learning_rate": 8.003941135924859e-07,
"loss": 0.0,
"step": 345
},
{
"epoch": 4.746543778801843,
"grad_norm": 0.007480297237634659,
"learning_rate": 7.83121721966248e-07,
"loss": 0.0,
"step": 346
},
{
"epoch": 4.76036866359447,
"grad_norm": 0.014115474186837673,
"learning_rate": 7.66003060770498e-07,
"loss": 0.0,
"step": 347
},
{
"epoch": 4.774193548387097,
"grad_norm": 0.0011564996093511581,
"learning_rate": 7.490396628216237e-07,
"loss": 0.0,
"step": 348
},
{
"epoch": 4.788018433179723,
"grad_norm": 0.01101834885776043,
"learning_rate": 7.322330470336314e-07,
"loss": 0.0,
"step": 349
},
{
"epoch": 4.8018433179723505,
"grad_norm": 0.003535608062520623,
"learning_rate": 7.155847182821524e-07,
"loss": 0.0,
"step": 350
},
{
"epoch": 4.815668202764977,
"grad_norm": 0.03185940906405449,
"learning_rate": 6.990961672696908e-07,
"loss": 0.0001,
"step": 351
},
{
"epoch": 4.829493087557603,
"grad_norm": 0.006516721565276384,
"learning_rate": 6.827688703921407e-07,
"loss": 0.0,
"step": 352
},
{
"epoch": 4.84331797235023,
"grad_norm": 0.008277276530861855,
"learning_rate": 6.666042896065983e-07,
"loss": 0.0,
"step": 353
},
{
"epoch": 4.857142857142857,
"grad_norm": 0.00266360049135983,
"learning_rate": 6.506038723004484e-07,
"loss": 0.0,
"step": 354
},
{
"epoch": 4.870967741935484,
"grad_norm": 0.01671386882662773,
"learning_rate": 6.347690511617693e-07,
"loss": 0.0,
"step": 355
},
{
"epoch": 4.88479262672811,
"grad_norm": 0.013981528580188751,
"learning_rate": 6.191012440510469e-07,
"loss": 0.0001,
"step": 356
},
{
"epoch": 4.8986175115207375,
"grad_norm": 0.02350999414920807,
"learning_rate": 6.036018538742208e-07,
"loss": 0.0,
"step": 357
},
{
"epoch": 4.912442396313364,
"grad_norm": 0.004093356896191835,
"learning_rate": 5.882722684570638e-07,
"loss": 0.0,
"step": 358
},
{
"epoch": 4.926267281105991,
"grad_norm": 0.00656296918168664,
"learning_rate": 5.731138604209169e-07,
"loss": 0.0,
"step": 359
},
{
"epoch": 4.940092165898617,
"grad_norm": 0.002148544415831566,
"learning_rate": 5.581279870597866e-07,
"loss": 0.0,
"step": 360
}
],
"logging_steps": 1,
"max_steps": 432,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 72,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.298790820660537e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}