{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 704,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002840909090909091,
"grad_norm": 5.7846018213668176,
"learning_rate": 0.0,
"loss": 0.9187,
"step": 1
},
{
"epoch": 0.005681818181818182,
"grad_norm": 5.6013787706769165,
"learning_rate": 5.681818181818182e-08,
"loss": 0.9337,
"step": 2
},
{
"epoch": 0.008522727272727272,
"grad_norm": 5.628576098877568,
"learning_rate": 1.1363636363636364e-07,
"loss": 0.9727,
"step": 3
},
{
"epoch": 0.011363636363636364,
"grad_norm": 5.6880005354310885,
"learning_rate": 1.7045454545454545e-07,
"loss": 0.9255,
"step": 4
},
{
"epoch": 0.014204545454545454,
"grad_norm": 5.1670651022067124,
"learning_rate": 2.2727272727272729e-07,
"loss": 0.9236,
"step": 5
},
{
"epoch": 0.017045454545454544,
"grad_norm": 5.568976898855187,
"learning_rate": 2.840909090909091e-07,
"loss": 0.9168,
"step": 6
},
{
"epoch": 0.019886363636363636,
"grad_norm": 5.471309616694239,
"learning_rate": 3.409090909090909e-07,
"loss": 0.9448,
"step": 7
},
{
"epoch": 0.022727272727272728,
"grad_norm": 4.958288778905564,
"learning_rate": 3.9772727272727276e-07,
"loss": 0.8793,
"step": 8
},
{
"epoch": 0.02556818181818182,
"grad_norm": 4.933702183083632,
"learning_rate": 4.5454545454545457e-07,
"loss": 0.8895,
"step": 9
},
{
"epoch": 0.028409090909090908,
"grad_norm": 5.2766663694969616,
"learning_rate": 5.113636363636364e-07,
"loss": 0.9382,
"step": 10
},
{
"epoch": 0.03125,
"grad_norm": 5.038528653630425,
"learning_rate": 5.681818181818182e-07,
"loss": 0.9115,
"step": 11
},
{
"epoch": 0.03409090909090909,
"grad_norm": 5.232611689675317,
"learning_rate": 6.25e-07,
"loss": 0.9292,
"step": 12
},
{
"epoch": 0.036931818181818184,
"grad_norm": 4.988139455338321,
"learning_rate": 6.818181818181818e-07,
"loss": 0.9014,
"step": 13
},
{
"epoch": 0.03977272727272727,
"grad_norm": 4.309132705290965,
"learning_rate": 7.386363636363638e-07,
"loss": 0.8873,
"step": 14
},
{
"epoch": 0.04261363636363636,
"grad_norm": 4.307584281122824,
"learning_rate": 7.954545454545455e-07,
"loss": 0.8333,
"step": 15
},
{
"epoch": 0.045454545454545456,
"grad_norm": 4.26307165025195,
"learning_rate": 8.522727272727273e-07,
"loss": 0.9033,
"step": 16
},
{
"epoch": 0.048295454545454544,
"grad_norm": 4.39034120717091,
"learning_rate": 9.090909090909091e-07,
"loss": 0.8635,
"step": 17
},
{
"epoch": 0.05113636363636364,
"grad_norm": 3.84235956788758,
"learning_rate": 9.65909090909091e-07,
"loss": 0.8446,
"step": 18
},
{
"epoch": 0.05397727272727273,
"grad_norm": 3.6266638353706115,
"learning_rate": 1.0227272727272729e-06,
"loss": 0.8415,
"step": 19
},
{
"epoch": 0.056818181818181816,
"grad_norm": 3.476359615290905,
"learning_rate": 1.0795454545454546e-06,
"loss": 0.8419,
"step": 20
},
{
"epoch": 0.05965909090909091,
"grad_norm": 3.156716131666368,
"learning_rate": 1.1363636363636364e-06,
"loss": 0.8403,
"step": 21
},
{
"epoch": 0.0625,
"grad_norm": 2.9151648586315138,
"learning_rate": 1.1931818181818183e-06,
"loss": 0.8146,
"step": 22
},
{
"epoch": 0.06534090909090909,
"grad_norm": 2.6691014608856354,
"learning_rate": 1.25e-06,
"loss": 0.7736,
"step": 23
},
{
"epoch": 0.06818181818181818,
"grad_norm": 2.573471865342396,
"learning_rate": 1.3068181818181819e-06,
"loss": 0.805,
"step": 24
},
{
"epoch": 0.07102272727272728,
"grad_norm": 2.478042571741361,
"learning_rate": 1.3636363636363636e-06,
"loss": 0.7884,
"step": 25
},
{
"epoch": 0.07386363636363637,
"grad_norm": 2.681597923455157,
"learning_rate": 1.4204545454545458e-06,
"loss": 0.7906,
"step": 26
},
{
"epoch": 0.07670454545454546,
"grad_norm": 3.5128502346347874,
"learning_rate": 1.4772727272727275e-06,
"loss": 0.8005,
"step": 27
},
{
"epoch": 0.07954545454545454,
"grad_norm": 3.0997314079873997,
"learning_rate": 1.5340909090909093e-06,
"loss": 0.7681,
"step": 28
},
{
"epoch": 0.08238636363636363,
"grad_norm": 3.301016199914052,
"learning_rate": 1.590909090909091e-06,
"loss": 0.7411,
"step": 29
},
{
"epoch": 0.08522727272727272,
"grad_norm": 3.0132955912093293,
"learning_rate": 1.6477272727272728e-06,
"loss": 0.7587,
"step": 30
},
{
"epoch": 0.08806818181818182,
"grad_norm": 2.389542355785432,
"learning_rate": 1.7045454545454546e-06,
"loss": 0.7115,
"step": 31
},
{
"epoch": 0.09090909090909091,
"grad_norm": 2.651871278037176,
"learning_rate": 1.7613636363636365e-06,
"loss": 0.7563,
"step": 32
},
{
"epoch": 0.09375,
"grad_norm": 2.164829082034884,
"learning_rate": 1.8181818181818183e-06,
"loss": 0.7599,
"step": 33
},
{
"epoch": 0.09659090909090909,
"grad_norm": 1.9014357867340361,
"learning_rate": 1.8750000000000003e-06,
"loss": 0.6873,
"step": 34
},
{
"epoch": 0.09943181818181818,
"grad_norm": 1.7872751567008667,
"learning_rate": 1.931818181818182e-06,
"loss": 0.7364,
"step": 35
},
{
"epoch": 0.10227272727272728,
"grad_norm": 2.0723837790108677,
"learning_rate": 1.9886363636363638e-06,
"loss": 0.6644,
"step": 36
},
{
"epoch": 0.10511363636363637,
"grad_norm": 2.1307866633916137,
"learning_rate": 2.0454545454545457e-06,
"loss": 0.6943,
"step": 37
},
{
"epoch": 0.10795454545454546,
"grad_norm": 2.2707754760571093,
"learning_rate": 2.1022727272727277e-06,
"loss": 0.6941,
"step": 38
},
{
"epoch": 0.11079545454545454,
"grad_norm": 2.458749073452043,
"learning_rate": 2.1590909090909092e-06,
"loss": 0.6788,
"step": 39
},
{
"epoch": 0.11363636363636363,
"grad_norm": 2.042545090231337,
"learning_rate": 2.2159090909090912e-06,
"loss": 0.6434,
"step": 40
},
{
"epoch": 0.11647727272727272,
"grad_norm": 1.8580077128269024,
"learning_rate": 2.2727272727272728e-06,
"loss": 0.6573,
"step": 41
},
{
"epoch": 0.11931818181818182,
"grad_norm": 1.8639374232639823,
"learning_rate": 2.3295454545454547e-06,
"loss": 0.6623,
"step": 42
},
{
"epoch": 0.12215909090909091,
"grad_norm": 1.7743666280182162,
"learning_rate": 2.3863636363636367e-06,
"loss": 0.6453,
"step": 43
},
{
"epoch": 0.125,
"grad_norm": 1.7689256446618578,
"learning_rate": 2.4431818181818182e-06,
"loss": 0.6379,
"step": 44
},
{
"epoch": 0.1278409090909091,
"grad_norm": 1.763969205340361,
"learning_rate": 2.5e-06,
"loss": 0.6751,
"step": 45
},
{
"epoch": 0.13068181818181818,
"grad_norm": 2.0371464953854543,
"learning_rate": 2.556818181818182e-06,
"loss": 0.6405,
"step": 46
},
{
"epoch": 0.13352272727272727,
"grad_norm": 1.7777840033046852,
"learning_rate": 2.6136363636363637e-06,
"loss": 0.6252,
"step": 47
},
{
"epoch": 0.13636363636363635,
"grad_norm": 1.7117349580502965,
"learning_rate": 2.6704545454545457e-06,
"loss": 0.6364,
"step": 48
},
{
"epoch": 0.13920454545454544,
"grad_norm": 1.6579980263734242,
"learning_rate": 2.7272727272727272e-06,
"loss": 0.6876,
"step": 49
},
{
"epoch": 0.14204545454545456,
"grad_norm": 1.645561441261347,
"learning_rate": 2.784090909090909e-06,
"loss": 0.6562,
"step": 50
},
{
"epoch": 0.14488636363636365,
"grad_norm": 1.6373402368183252,
"learning_rate": 2.8409090909090916e-06,
"loss": 0.5826,
"step": 51
},
{
"epoch": 0.14772727272727273,
"grad_norm": 1.6128855461127896,
"learning_rate": 2.897727272727273e-06,
"loss": 0.6826,
"step": 52
},
{
"epoch": 0.15056818181818182,
"grad_norm": 1.640865236871538,
"learning_rate": 2.954545454545455e-06,
"loss": 0.7155,
"step": 53
},
{
"epoch": 0.1534090909090909,
"grad_norm": 1.4720357466178808,
"learning_rate": 3.0113636363636366e-06,
"loss": 0.6204,
"step": 54
},
{
"epoch": 0.15625,
"grad_norm": 1.472120144942263,
"learning_rate": 3.0681818181818186e-06,
"loss": 0.6389,
"step": 55
},
{
"epoch": 0.1590909090909091,
"grad_norm": 1.449589289015899,
"learning_rate": 3.125e-06,
"loss": 0.5947,
"step": 56
},
{
"epoch": 0.16193181818181818,
"grad_norm": 1.5010999353034318,
"learning_rate": 3.181818181818182e-06,
"loss": 0.621,
"step": 57
},
{
"epoch": 0.16477272727272727,
"grad_norm": 1.8187567778405078,
"learning_rate": 3.2386363636363637e-06,
"loss": 0.6318,
"step": 58
},
{
"epoch": 0.16761363636363635,
"grad_norm": 1.6541144238811112,
"learning_rate": 3.2954545454545456e-06,
"loss": 0.5619,
"step": 59
},
{
"epoch": 0.17045454545454544,
"grad_norm": 1.6963775548673772,
"learning_rate": 3.352272727272727e-06,
"loss": 0.5823,
"step": 60
},
{
"epoch": 0.17329545454545456,
"grad_norm": 1.4964288240432566,
"learning_rate": 3.409090909090909e-06,
"loss": 0.5662,
"step": 61
},
{
"epoch": 0.17613636363636365,
"grad_norm": 1.6832346234487932,
"learning_rate": 3.4659090909090915e-06,
"loss": 0.6159,
"step": 62
},
{
"epoch": 0.17897727272727273,
"grad_norm": 1.5799422346591692,
"learning_rate": 3.522727272727273e-06,
"loss": 0.6365,
"step": 63
},
{
"epoch": 0.18181818181818182,
"grad_norm": 1.6582299717205322,
"learning_rate": 3.579545454545455e-06,
"loss": 0.647,
"step": 64
},
{
"epoch": 0.1846590909090909,
"grad_norm": 1.800280271805853,
"learning_rate": 3.6363636363636366e-06,
"loss": 0.6799,
"step": 65
},
{
"epoch": 0.1875,
"grad_norm": 1.7351635081451664,
"learning_rate": 3.6931818181818186e-06,
"loss": 0.6107,
"step": 66
},
{
"epoch": 0.1903409090909091,
"grad_norm": 1.5238122131962493,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.6242,
"step": 67
},
{
"epoch": 0.19318181818181818,
"grad_norm": 1.6460555069181215,
"learning_rate": 3.806818181818182e-06,
"loss": 0.6193,
"step": 68
},
{
"epoch": 0.19602272727272727,
"grad_norm": 1.6136927208782876,
"learning_rate": 3.863636363636364e-06,
"loss": 0.6163,
"step": 69
},
{
"epoch": 0.19886363636363635,
"grad_norm": 1.6283580159831965,
"learning_rate": 3.9204545454545456e-06,
"loss": 0.5589,
"step": 70
},
{
"epoch": 0.20170454545454544,
"grad_norm": 1.6416288138229345,
"learning_rate": 3.9772727272727275e-06,
"loss": 0.5439,
"step": 71
},
{
"epoch": 0.20454545454545456,
"grad_norm": 1.7429357324390875,
"learning_rate": 4.0340909090909095e-06,
"loss": 0.647,
"step": 72
},
{
"epoch": 0.20738636363636365,
"grad_norm": 1.491759374118448,
"learning_rate": 4.0909090909090915e-06,
"loss": 0.5954,
"step": 73
},
{
"epoch": 0.21022727272727273,
"grad_norm": 1.5655918386388736,
"learning_rate": 4.1477272727272734e-06,
"loss": 0.5902,
"step": 74
},
{
"epoch": 0.21306818181818182,
"grad_norm": 1.6576527986240395,
"learning_rate": 4.204545454545455e-06,
"loss": 0.5856,
"step": 75
},
{
"epoch": 0.2159090909090909,
"grad_norm": 1.6628001520235092,
"learning_rate": 4.2613636363636365e-06,
"loss": 0.5808,
"step": 76
},
{
"epoch": 0.21875,
"grad_norm": 1.6620023765271195,
"learning_rate": 4.3181818181818185e-06,
"loss": 0.5992,
"step": 77
},
{
"epoch": 0.2215909090909091,
"grad_norm": 1.4886709042670327,
"learning_rate": 4.3750000000000005e-06,
"loss": 0.6091,
"step": 78
},
{
"epoch": 0.22443181818181818,
"grad_norm": 1.4251511789149547,
"learning_rate": 4.4318181818181824e-06,
"loss": 0.5867,
"step": 79
},
{
"epoch": 0.22727272727272727,
"grad_norm": 1.5633386386066466,
"learning_rate": 4.4886363636363636e-06,
"loss": 0.591,
"step": 80
},
{
"epoch": 0.23011363636363635,
"grad_norm": 1.5634040607472635,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.5265,
"step": 81
},
{
"epoch": 0.23295454545454544,
"grad_norm": 1.6229601346591955,
"learning_rate": 4.6022727272727275e-06,
"loss": 0.5428,
"step": 82
},
{
"epoch": 0.23579545454545456,
"grad_norm": 1.5566532175674166,
"learning_rate": 4.6590909090909095e-06,
"loss": 0.5638,
"step": 83
},
{
"epoch": 0.23863636363636365,
"grad_norm": 1.593543335185521,
"learning_rate": 4.715909090909091e-06,
"loss": 0.5767,
"step": 84
},
{
"epoch": 0.24147727272727273,
"grad_norm": 1.449398786815831,
"learning_rate": 4.772727272727273e-06,
"loss": 0.5628,
"step": 85
},
{
"epoch": 0.24431818181818182,
"grad_norm": 1.5387043103946096,
"learning_rate": 4.829545454545455e-06,
"loss": 0.5735,
"step": 86
},
{
"epoch": 0.2471590909090909,
"grad_norm": 1.6763905573278226,
"learning_rate": 4.8863636363636365e-06,
"loss": 0.6032,
"step": 87
},
{
"epoch": 0.25,
"grad_norm": 1.5583409873897445,
"learning_rate": 4.9431818181818184e-06,
"loss": 0.623,
"step": 88
},
{
"epoch": 0.2528409090909091,
"grad_norm": 1.6187547313549033,
"learning_rate": 5e-06,
"loss": 0.5804,
"step": 89
},
{
"epoch": 0.2556818181818182,
"grad_norm": 1.4963976477876901,
"learning_rate": 5.056818181818182e-06,
"loss": 0.5697,
"step": 90
},
{
"epoch": 0.2585227272727273,
"grad_norm": 1.5131795004333568,
"learning_rate": 5.113636363636364e-06,
"loss": 0.5565,
"step": 91
},
{
"epoch": 0.26136363636363635,
"grad_norm": 1.5548395196775626,
"learning_rate": 5.170454545454546e-06,
"loss": 0.5461,
"step": 92
},
{
"epoch": 0.26420454545454547,
"grad_norm": 1.552959319249568,
"learning_rate": 5.2272727272727274e-06,
"loss": 0.5643,
"step": 93
},
{
"epoch": 0.26704545454545453,
"grad_norm": 1.6047709356284223,
"learning_rate": 5.28409090909091e-06,
"loss": 0.5798,
"step": 94
},
{
"epoch": 0.26988636363636365,
"grad_norm": 1.948509778915627,
"learning_rate": 5.340909090909091e-06,
"loss": 0.5327,
"step": 95
},
{
"epoch": 0.2727272727272727,
"grad_norm": 1.5318432235348463,
"learning_rate": 5.397727272727273e-06,
"loss": 0.5541,
"step": 96
},
{
"epoch": 0.2755681818181818,
"grad_norm": 1.5208133763810194,
"learning_rate": 5.4545454545454545e-06,
"loss": 0.529,
"step": 97
},
{
"epoch": 0.2784090909090909,
"grad_norm": 1.6124894417629865,
"learning_rate": 5.511363636363637e-06,
"loss": 0.5464,
"step": 98
},
{
"epoch": 0.28125,
"grad_norm": 1.7110965153428093,
"learning_rate": 5.568181818181818e-06,
"loss": 0.534,
"step": 99
},
{
"epoch": 0.2840909090909091,
"grad_norm": 1.658571452279957,
"learning_rate": 5.625e-06,
"loss": 0.6114,
"step": 100
},
{
"epoch": 0.2869318181818182,
"grad_norm": 1.47658441902133,
"learning_rate": 5.681818181818183e-06,
"loss": 0.5547,
"step": 101
},
{
"epoch": 0.2897727272727273,
"grad_norm": 1.5722026271709708,
"learning_rate": 5.738636363636364e-06,
"loss": 0.5702,
"step": 102
},
{
"epoch": 0.29261363636363635,
"grad_norm": 1.6209365630457135,
"learning_rate": 5.795454545454546e-06,
"loss": 0.5728,
"step": 103
},
{
"epoch": 0.29545454545454547,
"grad_norm": 1.6904389396003674,
"learning_rate": 5.852272727272727e-06,
"loss": 0.5769,
"step": 104
},
{
"epoch": 0.29829545454545453,
"grad_norm": 1.5294499189980586,
"learning_rate": 5.90909090909091e-06,
"loss": 0.5656,
"step": 105
},
{
"epoch": 0.30113636363636365,
"grad_norm": 1.5916214451637154,
"learning_rate": 5.965909090909091e-06,
"loss": 0.5088,
"step": 106
},
{
"epoch": 0.3039772727272727,
"grad_norm": 1.6347827282817056,
"learning_rate": 6.022727272727273e-06,
"loss": 0.5828,
"step": 107
},
{
"epoch": 0.3068181818181818,
"grad_norm": 1.5618332724517157,
"learning_rate": 6.079545454545454e-06,
"loss": 0.5231,
"step": 108
},
{
"epoch": 0.3096590909090909,
"grad_norm": 1.6485262715983184,
"learning_rate": 6.136363636363637e-06,
"loss": 0.538,
"step": 109
},
{
"epoch": 0.3125,
"grad_norm": 1.7278179784286467,
"learning_rate": 6.193181818181818e-06,
"loss": 0.5431,
"step": 110
},
{
"epoch": 0.3153409090909091,
"grad_norm": 1.4942725054179251,
"learning_rate": 6.25e-06,
"loss": 0.5042,
"step": 111
},
{
"epoch": 0.3181818181818182,
"grad_norm": 1.6979365186765227,
"learning_rate": 6.306818181818183e-06,
"loss": 0.5677,
"step": 112
},
{
"epoch": 0.3210227272727273,
"grad_norm": 1.8418176894538365,
"learning_rate": 6.363636363636364e-06,
"loss": 0.588,
"step": 113
},
{
"epoch": 0.32386363636363635,
"grad_norm": 1.640630991484406,
"learning_rate": 6.420454545454546e-06,
"loss": 0.5447,
"step": 114
},
{
"epoch": 0.32670454545454547,
"grad_norm": 1.6066554835877627,
"learning_rate": 6.477272727272727e-06,
"loss": 0.581,
"step": 115
},
{
"epoch": 0.32954545454545453,
"grad_norm": 1.8057754828363723,
"learning_rate": 6.53409090909091e-06,
"loss": 0.5621,
"step": 116
},
{
"epoch": 0.33238636363636365,
"grad_norm": 1.711754276144589,
"learning_rate": 6.590909090909091e-06,
"loss": 0.5918,
"step": 117
},
{
"epoch": 0.3352272727272727,
"grad_norm": 1.517896860756366,
"learning_rate": 6.647727272727273e-06,
"loss": 0.5693,
"step": 118
},
{
"epoch": 0.3380681818181818,
"grad_norm": 1.5312310643773122,
"learning_rate": 6.704545454545454e-06,
"loss": 0.5473,
"step": 119
},
{
"epoch": 0.3409090909090909,
"grad_norm": 1.6182714991455485,
"learning_rate": 6.761363636363637e-06,
"loss": 0.5566,
"step": 120
},
{
"epoch": 0.34375,
"grad_norm": 1.6852771913947449,
"learning_rate": 6.818181818181818e-06,
"loss": 0.5547,
"step": 121
},
{
"epoch": 0.3465909090909091,
"grad_norm": 1.5897680070295204,
"learning_rate": 6.875e-06,
"loss": 0.5691,
"step": 122
},
{
"epoch": 0.3494318181818182,
"grad_norm": 1.7417266228377517,
"learning_rate": 6.931818181818183e-06,
"loss": 0.5362,
"step": 123
},
{
"epoch": 0.3522727272727273,
"grad_norm": 1.5989703470783707,
"learning_rate": 6.988636363636364e-06,
"loss": 0.5282,
"step": 124
},
{
"epoch": 0.35511363636363635,
"grad_norm": 1.6580595085217762,
"learning_rate": 7.045454545454546e-06,
"loss": 0.4953,
"step": 125
},
{
"epoch": 0.35795454545454547,
"grad_norm": 1.689844091653614,
"learning_rate": 7.102272727272727e-06,
"loss": 0.5467,
"step": 126
},
{
"epoch": 0.36079545454545453,
"grad_norm": 1.8347469318031917,
"learning_rate": 7.15909090909091e-06,
"loss": 0.536,
"step": 127
},
{
"epoch": 0.36363636363636365,
"grad_norm": 1.5529169597119448,
"learning_rate": 7.215909090909091e-06,
"loss": 0.524,
"step": 128
},
{
"epoch": 0.3664772727272727,
"grad_norm": 1.6645761334595488,
"learning_rate": 7.272727272727273e-06,
"loss": 0.5332,
"step": 129
},
{
"epoch": 0.3693181818181818,
"grad_norm": 1.7305662363325451,
"learning_rate": 7.329545454545455e-06,
"loss": 0.5219,
"step": 130
},
{
"epoch": 0.3721590909090909,
"grad_norm": 1.5405083703593692,
"learning_rate": 7.386363636363637e-06,
"loss": 0.503,
"step": 131
},
{
"epoch": 0.375,
"grad_norm": 1.5865801365534873,
"learning_rate": 7.443181818181818e-06,
"loss": 0.5625,
"step": 132
},
{
"epoch": 0.3778409090909091,
"grad_norm": 1.782393408869917,
"learning_rate": 7.500000000000001e-06,
"loss": 0.5186,
"step": 133
},
{
"epoch": 0.3806818181818182,
"grad_norm": 1.8819124061481232,
"learning_rate": 7.556818181818183e-06,
"loss": 0.5932,
"step": 134
},
{
"epoch": 0.3835227272727273,
"grad_norm": 1.614587862077087,
"learning_rate": 7.613636363636364e-06,
"loss": 0.5156,
"step": 135
},
{
"epoch": 0.38636363636363635,
"grad_norm": 1.6896450400418643,
"learning_rate": 7.670454545454547e-06,
"loss": 0.5428,
"step": 136
},
{
"epoch": 0.38920454545454547,
"grad_norm": 1.648603590044125,
"learning_rate": 7.727272727272727e-06,
"loss": 0.5574,
"step": 137
},
{
"epoch": 0.39204545454545453,
"grad_norm": 1.686987993970066,
"learning_rate": 7.784090909090911e-06,
"loss": 0.4862,
"step": 138
},
{
"epoch": 0.39488636363636365,
"grad_norm": 1.7918189069952541,
"learning_rate": 7.840909090909091e-06,
"loss": 0.5527,
"step": 139
},
{
"epoch": 0.3977272727272727,
"grad_norm": 1.733609620505762,
"learning_rate": 7.897727272727273e-06,
"loss": 0.5896,
"step": 140
},
{
"epoch": 0.4005681818181818,
"grad_norm": 1.7767681019689756,
"learning_rate": 7.954545454545455e-06,
"loss": 0.5337,
"step": 141
},
{
"epoch": 0.4034090909090909,
"grad_norm": 1.62706994093177,
"learning_rate": 8.011363636363637e-06,
"loss": 0.529,
"step": 142
},
{
"epoch": 0.40625,
"grad_norm": 1.6190932829360436,
"learning_rate": 8.068181818181819e-06,
"loss": 0.5471,
"step": 143
},
{
"epoch": 0.4090909090909091,
"grad_norm": 1.4581793842750983,
"learning_rate": 8.125000000000001e-06,
"loss": 0.5353,
"step": 144
},
{
"epoch": 0.4119318181818182,
"grad_norm": 1.5106234263427771,
"learning_rate": 8.181818181818183e-06,
"loss": 0.5934,
"step": 145
},
{
"epoch": 0.4147727272727273,
"grad_norm": 1.5456174591730625,
"learning_rate": 8.238636363636365e-06,
"loss": 0.5055,
"step": 146
},
{
"epoch": 0.41761363636363635,
"grad_norm": 1.5169605544770564,
"learning_rate": 8.295454545454547e-06,
"loss": 0.5273,
"step": 147
},
{
"epoch": 0.42045454545454547,
"grad_norm": 1.6894139710313294,
"learning_rate": 8.352272727272727e-06,
"loss": 0.4984,
"step": 148
},
{
"epoch": 0.42329545454545453,
"grad_norm": 1.447004877728819,
"learning_rate": 8.40909090909091e-06,
"loss": 0.5616,
"step": 149
},
{
"epoch": 0.42613636363636365,
"grad_norm": 1.4673916601494148,
"learning_rate": 8.465909090909091e-06,
"loss": 0.5056,
"step": 150
},
{
"epoch": 0.4289772727272727,
"grad_norm": 1.524122665837613,
"learning_rate": 8.522727272727273e-06,
"loss": 0.5458,
"step": 151
},
{
"epoch": 0.4318181818181818,
"grad_norm": 1.6973227539753544,
"learning_rate": 8.579545454545455e-06,
"loss": 0.4954,
"step": 152
},
{
"epoch": 0.4346590909090909,
"grad_norm": 1.6229879362305883,
"learning_rate": 8.636363636363637e-06,
"loss": 0.531,
"step": 153
},
{
"epoch": 0.4375,
"grad_norm": 1.6650450560733285,
"learning_rate": 8.693181818181819e-06,
"loss": 0.4925,
"step": 154
},
{
"epoch": 0.4403409090909091,
"grad_norm": 1.52976091657608,
"learning_rate": 8.750000000000001e-06,
"loss": 0.4879,
"step": 155
},
{
"epoch": 0.4431818181818182,
"grad_norm": 1.5124703795369776,
"learning_rate": 8.806818181818183e-06,
"loss": 0.5203,
"step": 156
},
{
"epoch": 0.4460227272727273,
"grad_norm": 1.5985625351303485,
"learning_rate": 8.863636363636365e-06,
"loss": 0.6027,
"step": 157
},
{
"epoch": 0.44886363636363635,
"grad_norm": 1.588345112852916,
"learning_rate": 8.920454545454547e-06,
"loss": 0.5724,
"step": 158
},
{
"epoch": 0.45170454545454547,
"grad_norm": 1.7377485705572997,
"learning_rate": 8.977272727272727e-06,
"loss": 0.5222,
"step": 159
},
{
"epoch": 0.45454545454545453,
"grad_norm": 1.7413287692817025,
"learning_rate": 9.03409090909091e-06,
"loss": 0.5237,
"step": 160
},
{
"epoch": 0.45738636363636365,
"grad_norm": 1.7851847389488553,
"learning_rate": 9.090909090909091e-06,
"loss": 0.5663,
"step": 161
},
{
"epoch": 0.4602272727272727,
"grad_norm": 1.6153479555325219,
"learning_rate": 9.147727272727273e-06,
"loss": 0.5366,
"step": 162
},
{
"epoch": 0.4630681818181818,
"grad_norm": 1.637251170315082,
"learning_rate": 9.204545454545455e-06,
"loss": 0.5531,
"step": 163
},
{
"epoch": 0.4659090909090909,
"grad_norm": 1.6749216054319256,
"learning_rate": 9.261363636363637e-06,
"loss": 0.5513,
"step": 164
},
{
"epoch": 0.46875,
"grad_norm": 1.762177071370672,
"learning_rate": 9.318181818181819e-06,
"loss": 0.5164,
"step": 165
},
{
"epoch": 0.4715909090909091,
"grad_norm": 1.5634911246597942,
"learning_rate": 9.375000000000001e-06,
"loss": 0.4601,
"step": 166
},
{
"epoch": 0.4744318181818182,
"grad_norm": 1.5530286654821466,
"learning_rate": 9.431818181818183e-06,
"loss": 0.5434,
"step": 167
},
{
"epoch": 0.4772727272727273,
"grad_norm": 1.4890754991029818,
"learning_rate": 9.488636363636365e-06,
"loss": 0.5621,
"step": 168
},
{
"epoch": 0.48011363636363635,
"grad_norm": 1.545653087393646,
"learning_rate": 9.545454545454547e-06,
"loss": 0.573,
"step": 169
},
{
"epoch": 0.48295454545454547,
"grad_norm": 1.6628595064300078,
"learning_rate": 9.602272727272727e-06,
"loss": 0.5293,
"step": 170
},
{
"epoch": 0.48579545454545453,
"grad_norm": 1.6199990435876728,
"learning_rate": 9.65909090909091e-06,
"loss": 0.5347,
"step": 171
},
{
"epoch": 0.48863636363636365,
"grad_norm": 1.7032779971266183,
"learning_rate": 9.715909090909091e-06,
"loss": 0.5563,
"step": 172
},
{
"epoch": 0.4914772727272727,
"grad_norm": 1.5057928727458723,
"learning_rate": 9.772727272727273e-06,
"loss": 0.5441,
"step": 173
},
{
"epoch": 0.4943181818181818,
"grad_norm": 1.7930017997140106,
"learning_rate": 9.829545454545455e-06,
"loss": 0.5041,
"step": 174
},
{
"epoch": 0.4971590909090909,
"grad_norm": 1.395243731703088,
"learning_rate": 9.886363636363637e-06,
"loss": 0.5926,
"step": 175
},
{
"epoch": 0.5,
"grad_norm": 1.617882984644789,
"learning_rate": 9.943181818181819e-06,
"loss": 0.5188,
"step": 176
},
{
"epoch": 0.5028409090909091,
"grad_norm": 1.6266270332812511,
"learning_rate": 1e-05,
"loss": 0.5424,
"step": 177
},
{
"epoch": 0.5056818181818182,
"grad_norm": 1.7283548702418214,
"learning_rate": 9.999990166021438e-06,
"loss": 0.5037,
"step": 178
},
{
"epoch": 0.5085227272727273,
"grad_norm": 1.5500412944724196,
"learning_rate": 9.999960664124435e-06,
"loss": 0.5191,
"step": 179
},
{
"epoch": 0.5113636363636364,
"grad_norm": 1.650388352182727,
"learning_rate": 9.999911494425041e-06,
"loss": 0.5709,
"step": 180
},
{
"epoch": 0.5142045454545454,
"grad_norm": 1.7425734419236485,
"learning_rate": 9.999842657116667e-06,
"loss": 0.5537,
"step": 181
},
{
"epoch": 0.5170454545454546,
"grad_norm": 1.7441409412425497,
"learning_rate": 9.99975415247009e-06,
"loss": 0.5236,
"step": 182
},
{
"epoch": 0.5198863636363636,
"grad_norm": 1.7268326575282316,
"learning_rate": 9.999645980833454e-06,
"loss": 0.5273,
"step": 183
},
{
"epoch": 0.5227272727272727,
"grad_norm": 1.5872940055299185,
"learning_rate": 9.999518142632263e-06,
"loss": 0.5294,
"step": 184
},
{
"epoch": 0.5255681818181818,
"grad_norm": 1.5813034283378942,
"learning_rate": 9.999370638369377e-06,
"loss": 0.5202,
"step": 185
},
{
"epoch": 0.5284090909090909,
"grad_norm": 1.7024661321437708,
"learning_rate": 9.999203468625017e-06,
"loss": 0.514,
"step": 186
},
{
"epoch": 0.53125,
"grad_norm": 1.670438625550961,
"learning_rate": 9.999016634056764e-06,
"loss": 0.5139,
"step": 187
},
{
"epoch": 0.5340909090909091,
"grad_norm": 1.669791599161175,
"learning_rate": 9.998810135399545e-06,
"loss": 0.5218,
"step": 188
},
{
"epoch": 0.5369318181818182,
"grad_norm": 1.644186005465554,
"learning_rate": 9.998583973465647e-06,
"loss": 0.5487,
"step": 189
},
{
"epoch": 0.5397727272727273,
"grad_norm": 1.5862364549108896,
"learning_rate": 9.998338149144693e-06,
"loss": 0.531,
"step": 190
},
{
"epoch": 0.5426136363636364,
"grad_norm": 1.5651823720767404,
"learning_rate": 9.998072663403657e-06,
"loss": 0.5238,
"step": 191
},
{
"epoch": 0.5454545454545454,
"grad_norm": 1.449366653938261,
"learning_rate": 9.997787517286852e-06,
"loss": 0.5527,
"step": 192
},
{
"epoch": 0.5482954545454546,
"grad_norm": 1.552109203796676,
"learning_rate": 9.997482711915926e-06,
"loss": 0.5169,
"step": 193
},
{
"epoch": 0.5511363636363636,
"grad_norm": 1.5316780272692387,
"learning_rate": 9.99715824848986e-06,
"loss": 0.5419,
"step": 194
},
{
"epoch": 0.5539772727272727,
"grad_norm": 1.5561708694343155,
"learning_rate": 9.99681412828496e-06,
"loss": 0.5414,
"step": 195
},
{
"epoch": 0.5568181818181818,
"grad_norm": 1.6124118322811405,
"learning_rate": 9.996450352654852e-06,
"loss": 0.5193,
"step": 196
},
{
"epoch": 0.5596590909090909,
"grad_norm": 1.7208000153520733,
"learning_rate": 9.996066923030484e-06,
"loss": 0.5567,
"step": 197
},
{
"epoch": 0.5625,
"grad_norm": 1.5778279523696555,
"learning_rate": 9.995663840920109e-06,
"loss": 0.5053,
"step": 198
},
{
"epoch": 0.5653409090909091,
"grad_norm": 1.4855103243467769,
"learning_rate": 9.99524110790929e-06,
"loss": 0.5226,
"step": 199
},
{
"epoch": 0.5681818181818182,
"grad_norm": 1.667759826450969,
"learning_rate": 9.994798725660884e-06,
"loss": 0.4973,
"step": 200
},
{
"epoch": 0.5710227272727273,
"grad_norm": 1.373799492473821,
"learning_rate": 9.994336695915041e-06,
"loss": 0.5007,
"step": 201
},
{
"epoch": 0.5738636363636364,
"grad_norm": 1.648039525222272,
"learning_rate": 9.993855020489198e-06,
"loss": 0.5144,
"step": 202
},
{
"epoch": 0.5767045454545454,
"grad_norm": 1.9647826400305184,
"learning_rate": 9.993353701278072e-06,
"loss": 0.543,
"step": 203
},
{
"epoch": 0.5795454545454546,
"grad_norm": 1.5693550891452794,
"learning_rate": 9.992832740253646e-06,
"loss": 0.5407,
"step": 204
},
{
"epoch": 0.5823863636363636,
"grad_norm": 1.6605237932745462,
"learning_rate": 9.992292139465166e-06,
"loss": 0.5447,
"step": 205
},
{
"epoch": 0.5852272727272727,
"grad_norm": 1.5688128583768695,
"learning_rate": 9.991731901039137e-06,
"loss": 0.5655,
"step": 206
},
{
"epoch": 0.5880681818181818,
"grad_norm": 1.6702105284432525,
"learning_rate": 9.991152027179307e-06,
"loss": 0.5074,
"step": 207
},
{
"epoch": 0.5909090909090909,
"grad_norm": 1.6991173010152403,
"learning_rate": 9.990552520166664e-06,
"loss": 0.54,
"step": 208
},
{
"epoch": 0.59375,
"grad_norm": 1.5436278737148996,
"learning_rate": 9.989933382359423e-06,
"loss": 0.5029,
"step": 209
},
{
"epoch": 0.5965909090909091,
"grad_norm": 1.6440577419199078,
"learning_rate": 9.989294616193018e-06,
"loss": 0.4773,
"step": 210
},
{
"epoch": 0.5994318181818182,
"grad_norm": 1.7071361354898558,
"learning_rate": 9.988636224180097e-06,
"loss": 0.5053,
"step": 211
},
{
"epoch": 0.6022727272727273,
"grad_norm": 1.7456629565862272,
"learning_rate": 9.9879582089105e-06,
"loss": 0.5004,
"step": 212
},
{
"epoch": 0.6051136363636364,
"grad_norm": 1.5909572264549439,
"learning_rate": 9.987260573051268e-06,
"loss": 0.5599,
"step": 213
},
{
"epoch": 0.6079545454545454,
"grad_norm": 1.7034637597618845,
"learning_rate": 9.986543319346613e-06,
"loss": 0.5436,
"step": 214
},
{
"epoch": 0.6107954545454546,
"grad_norm": 1.5069569616160918,
"learning_rate": 9.985806450617916e-06,
"loss": 0.5251,
"step": 215
},
{
"epoch": 0.6136363636363636,
"grad_norm": 1.5941088473148675,
"learning_rate": 9.985049969763721e-06,
"loss": 0.4738,
"step": 216
},
{
"epoch": 0.6164772727272727,
"grad_norm": 1.5111577587089249,
"learning_rate": 9.984273879759713e-06,
"loss": 0.5258,
"step": 217
},
{
"epoch": 0.6193181818181818,
"grad_norm": 1.4761221362965744,
"learning_rate": 9.983478183658712e-06,
"loss": 0.4785,
"step": 218
},
{
"epoch": 0.6221590909090909,
"grad_norm": 1.5040577229652037,
"learning_rate": 9.982662884590662e-06,
"loss": 0.4901,
"step": 219
},
{
"epoch": 0.625,
"grad_norm": 1.627974036727338,
"learning_rate": 9.981827985762618e-06,
"loss": 0.5002,
"step": 220
},
{
"epoch": 0.6278409090909091,
"grad_norm": 1.4751468229543794,
"learning_rate": 9.980973490458728e-06,
"loss": 0.5355,
"step": 221
},
{
"epoch": 0.6306818181818182,
"grad_norm": 1.5930987113010384,
"learning_rate": 9.980099402040231e-06,
"loss": 0.5668,
"step": 222
},
{
"epoch": 0.6335227272727273,
"grad_norm": 1.5813337316824179,
"learning_rate": 9.97920572394543e-06,
"loss": 0.4356,
"step": 223
},
{
"epoch": 0.6363636363636364,
"grad_norm": 1.774178789758969,
"learning_rate": 9.978292459689692e-06,
"loss": 0.5494,
"step": 224
},
{
"epoch": 0.6392045454545454,
"grad_norm": 1.5021051099138403,
"learning_rate": 9.977359612865424e-06,
"loss": 0.5346,
"step": 225
},
{
"epoch": 0.6420454545454546,
"grad_norm": 1.6931070839234672,
"learning_rate": 9.976407187142066e-06,
"loss": 0.5133,
"step": 226
},
{
"epoch": 0.6448863636363636,
"grad_norm": 1.725508341978023,
"learning_rate": 9.975435186266069e-06,
"loss": 0.5001,
"step": 227
},
{
"epoch": 0.6477272727272727,
"grad_norm": 1.4914218163125852,
"learning_rate": 9.974443614060889e-06,
"loss": 0.4941,
"step": 228
},
{
"epoch": 0.6505681818181818,
"grad_norm": 1.4782701475273787,
"learning_rate": 9.973432474426968e-06,
"loss": 0.5291,
"step": 229
},
{
"epoch": 0.6534090909090909,
"grad_norm": 1.5178330023195905,
"learning_rate": 9.972401771341711e-06,
"loss": 0.5118,
"step": 230
},
{
"epoch": 0.65625,
"grad_norm": 1.5738932861884023,
"learning_rate": 9.971351508859488e-06,
"loss": 0.5128,
"step": 231
},
{
"epoch": 0.6590909090909091,
"grad_norm": 1.5386697277681818,
"learning_rate": 9.970281691111598e-06,
"loss": 0.4767,
"step": 232
},
{
"epoch": 0.6619318181818182,
"grad_norm": 1.4630099429104129,
"learning_rate": 9.969192322306271e-06,
"loss": 0.4969,
"step": 233
},
{
"epoch": 0.6647727272727273,
"grad_norm": 1.3480903948948726,
"learning_rate": 9.968083406728637e-06,
"loss": 0.5732,
"step": 234
},
{
"epoch": 0.6676136363636364,
"grad_norm": 1.6632107802937945,
"learning_rate": 9.966954948740717e-06,
"loss": 0.4828,
"step": 235
},
{
"epoch": 0.6704545454545454,
"grad_norm": 1.479782256264717,
"learning_rate": 9.965806952781402e-06,
"loss": 0.5209,
"step": 236
},
{
"epoch": 0.6732954545454546,
"grad_norm": 1.6152559879763004,
"learning_rate": 9.964639423366442e-06,
"loss": 0.5247,
"step": 237
},
{
"epoch": 0.6761363636363636,
"grad_norm": 1.355213083704857,
"learning_rate": 9.96345236508842e-06,
"loss": 0.54,
"step": 238
},
{
"epoch": 0.6789772727272727,
"grad_norm": 1.4708340790563177,
"learning_rate": 9.962245782616734e-06,
"loss": 0.5178,
"step": 239
},
{
"epoch": 0.6818181818181818,
"grad_norm": 1.7002734228704037,
"learning_rate": 9.961019680697593e-06,
"loss": 0.4839,
"step": 240
},
{
"epoch": 0.6846590909090909,
"grad_norm": 1.5855366607994574,
"learning_rate": 9.959774064153977e-06,
"loss": 0.4664,
"step": 241
},
{
"epoch": 0.6875,
"grad_norm": 1.4710861883816675,
"learning_rate": 9.958508937885633e-06,
"loss": 0.5202,
"step": 242
},
{
"epoch": 0.6903409090909091,
"grad_norm": 1.5665588077627386,
"learning_rate": 9.957224306869053e-06,
"loss": 0.5082,
"step": 243
},
{
"epoch": 0.6931818181818182,
"grad_norm": 1.5137831929607448,
"learning_rate": 9.955920176157448e-06,
"loss": 0.4817,
"step": 244
},
{
"epoch": 0.6960227272727273,
"grad_norm": 1.5362365311838206,
"learning_rate": 9.954596550880735e-06,
"loss": 0.4949,
"step": 245
},
{
"epoch": 0.6988636363636364,
"grad_norm": 1.5434852584227288,
"learning_rate": 9.953253436245519e-06,
"loss": 0.5229,
"step": 246
},
{
"epoch": 0.7017045454545454,
"grad_norm": 1.4438612383868545,
"learning_rate": 9.951890837535058e-06,
"loss": 0.5273,
"step": 247
},
{
"epoch": 0.7045454545454546,
"grad_norm": 1.6229915207728822,
"learning_rate": 9.950508760109265e-06,
"loss": 0.4944,
"step": 248
},
{
"epoch": 0.7073863636363636,
"grad_norm": 1.3885376242229452,
"learning_rate": 9.949107209404664e-06,
"loss": 0.541,
"step": 249
},
{
"epoch": 0.7102272727272727,
"grad_norm": 1.4719200143551339,
"learning_rate": 9.947686190934385e-06,
"loss": 0.5062,
"step": 250
},
{
"epoch": 0.7130681818181818,
"grad_norm": 1.4430803001668344,
"learning_rate": 9.946245710288132e-06,
"loss": 0.5253,
"step": 251
},
{
"epoch": 0.7159090909090909,
"grad_norm": 1.4755305938799816,
"learning_rate": 9.944785773132168e-06,
"loss": 0.4852,
"step": 252
},
{
"epoch": 0.71875,
"grad_norm": 1.5136651249966744,
"learning_rate": 9.94330638520929e-06,
"loss": 0.5463,
"step": 253
},
{
"epoch": 0.7215909090909091,
"grad_norm": 1.5294777974746627,
"learning_rate": 9.941807552338805e-06,
"loss": 0.475,
"step": 254
},
{
"epoch": 0.7244318181818182,
"grad_norm": 1.605259878893428,
"learning_rate": 9.940289280416509e-06,
"loss": 0.4776,
"step": 255
},
{
"epoch": 0.7272727272727273,
"grad_norm": 1.315713602097482,
"learning_rate": 9.938751575414663e-06,
"loss": 0.5175,
"step": 256
},
{
"epoch": 0.7301136363636364,
"grad_norm": 1.3927615412027392,
"learning_rate": 9.937194443381972e-06,
"loss": 0.4765,
"step": 257
},
{
"epoch": 0.7329545454545454,
"grad_norm": 1.4339295292132817,
"learning_rate": 9.935617890443557e-06,
"loss": 0.4985,
"step": 258
},
{
"epoch": 0.7357954545454546,
"grad_norm": 1.3968961127996686,
"learning_rate": 9.934021922800931e-06,
"loss": 0.4704,
"step": 259
},
{
"epoch": 0.7386363636363636,
"grad_norm": 1.44031807612239,
"learning_rate": 9.932406546731981e-06,
"loss": 0.5186,
"step": 260
},
{
"epoch": 0.7414772727272727,
"grad_norm": 1.5831116349041292,
"learning_rate": 9.930771768590934e-06,
"loss": 0.4988,
"step": 261
},
{
"epoch": 0.7443181818181818,
"grad_norm": 1.371989349404777,
"learning_rate": 9.929117594808341e-06,
"loss": 0.5018,
"step": 262
},
{
"epoch": 0.7471590909090909,
"grad_norm": 1.5053430727565054,
"learning_rate": 9.927444031891044e-06,
"loss": 0.5703,
"step": 263
},
{
"epoch": 0.75,
"grad_norm": 2.0336405552487338,
"learning_rate": 9.92575108642216e-06,
"loss": 0.4771,
"step": 264
},
{
"epoch": 0.7528409090909091,
"grad_norm": 1.3896429508762578,
"learning_rate": 9.924038765061042e-06,
"loss": 0.4709,
"step": 265
},
{
"epoch": 0.7556818181818182,
"grad_norm": 1.5824500328291682,
"learning_rate": 9.92230707454326e-06,
"loss": 0.4818,
"step": 266
},
{
"epoch": 0.7585227272727273,
"grad_norm": 1.537798794932119,
"learning_rate": 9.92055602168058e-06,
"loss": 0.4845,
"step": 267
},
{
"epoch": 0.7613636363636364,
"grad_norm": 1.4323222954770605,
"learning_rate": 9.918785613360931e-06,
"loss": 0.5638,
"step": 268
},
{
"epoch": 0.7642045454545454,
"grad_norm": 1.5617485354631635,
"learning_rate": 9.916995856548371e-06,
"loss": 0.4595,
"step": 269
},
{
"epoch": 0.7670454545454546,
"grad_norm": 1.6295758621364878,
"learning_rate": 9.915186758283072e-06,
"loss": 0.5003,
"step": 270
},
{
"epoch": 0.7698863636363636,
"grad_norm": 1.7193205168854822,
"learning_rate": 9.913358325681292e-06,
"loss": 0.4921,
"step": 271
},
{
"epoch": 0.7727272727272727,
"grad_norm": 1.4343207005446186,
"learning_rate": 9.911510565935335e-06,
"loss": 0.5572,
"step": 272
},
{
"epoch": 0.7755681818181818,
"grad_norm": 1.5153801998849168,
"learning_rate": 9.909643486313533e-06,
"loss": 0.4599,
"step": 273
},
{
"epoch": 0.7784090909090909,
"grad_norm": 1.5255133825708138,
"learning_rate": 9.907757094160217e-06,
"loss": 0.4847,
"step": 274
},
{
"epoch": 0.78125,
"grad_norm": 1.5245888606968223,
"learning_rate": 9.905851396895679e-06,
"loss": 0.5101,
"step": 275
},
{
"epoch": 0.7840909090909091,
"grad_norm": 1.54702192325514,
"learning_rate": 9.903926402016153e-06,
"loss": 0.5076,
"step": 276
},
{
"epoch": 0.7869318181818182,
"grad_norm": 1.6313992214857218,
"learning_rate": 9.901982117093786e-06,
"loss": 0.4601,
"step": 277
},
{
"epoch": 0.7897727272727273,
"grad_norm": 1.5915121599329511,
"learning_rate": 9.900018549776598e-06,
"loss": 0.4865,
"step": 278
},
{
"epoch": 0.7926136363636364,
"grad_norm": 1.5326657935013723,
"learning_rate": 9.898035707788462e-06,
"loss": 0.5098,
"step": 279
},
{
"epoch": 0.7954545454545454,
"grad_norm": 1.701813376109124,
"learning_rate": 9.896033598929069e-06,
"loss": 0.5362,
"step": 280
},
{
"epoch": 0.7982954545454546,
"grad_norm": 1.4800001962097533,
"learning_rate": 9.894012231073895e-06,
"loss": 0.5324,
"step": 281
},
{
"epoch": 0.8011363636363636,
"grad_norm": 1.4483575554579828,
"learning_rate": 9.891971612174176e-06,
"loss": 0.4944,
"step": 282
},
{
"epoch": 0.8039772727272727,
"grad_norm": 1.480969574220171,
"learning_rate": 9.889911750256873e-06,
"loss": 0.514,
"step": 283
},
{
"epoch": 0.8068181818181818,
"grad_norm": 1.6355803470028214,
"learning_rate": 9.88783265342464e-06,
"loss": 0.5011,
"step": 284
},
{
"epoch": 0.8096590909090909,
"grad_norm": 1.4490799988047873,
"learning_rate": 9.885734329855798e-06,
"loss": 0.5329,
"step": 285
},
{
"epoch": 0.8125,
"grad_norm": 1.5125220233355698,
"learning_rate": 9.883616787804292e-06,
"loss": 0.5061,
"step": 286
},
{
"epoch": 0.8153409090909091,
"grad_norm": 1.5492210249492218,
"learning_rate": 9.881480035599667e-06,
"loss": 0.5153,
"step": 287
},
{
"epoch": 0.8181818181818182,
"grad_norm": 1.400547790361256,
"learning_rate": 9.879324081647035e-06,
"loss": 0.5208,
"step": 288
},
{
"epoch": 0.8210227272727273,
"grad_norm": 1.5862814840089992,
"learning_rate": 9.877148934427037e-06,
"loss": 0.5043,
"step": 289
},
{
"epoch": 0.8238636363636364,
"grad_norm": 1.5846145487408034,
"learning_rate": 9.874954602495811e-06,
"loss": 0.4685,
"step": 290
},
{
"epoch": 0.8267045454545454,
"grad_norm": 1.5029187410604994,
"learning_rate": 9.872741094484965e-06,
"loss": 0.469,
"step": 291
},
{
"epoch": 0.8295454545454546,
"grad_norm": 1.4366394515179206,
"learning_rate": 9.870508419101536e-06,
"loss": 0.5196,
"step": 292
},
{
"epoch": 0.8323863636363636,
"grad_norm": 1.6410923835471025,
"learning_rate": 9.868256585127956e-06,
"loss": 0.5072,
"step": 293
},
{
"epoch": 0.8352272727272727,
"grad_norm": 1.6058263511201358,
"learning_rate": 9.865985601422018e-06,
"loss": 0.5226,
"step": 294
},
{
"epoch": 0.8380681818181818,
"grad_norm": 1.5406427651119536,
"learning_rate": 9.863695476916846e-06,
"loss": 0.5513,
"step": 295
},
{
"epoch": 0.8409090909090909,
"grad_norm": 1.5483143683420586,
"learning_rate": 9.861386220620853e-06,
"loss": 0.5272,
"step": 296
},
{
"epoch": 0.84375,
"grad_norm": 1.5737342478379488,
"learning_rate": 9.859057841617709e-06,
"loss": 0.4981,
"step": 297
},
{
"epoch": 0.8465909090909091,
"grad_norm": 1.5552099127946464,
"learning_rate": 9.856710349066307e-06,
"loss": 0.5316,
"step": 298
},
{
"epoch": 0.8494318181818182,
"grad_norm": 1.3775976434027732,
"learning_rate": 9.854343752200725e-06,
"loss": 0.5028,
"step": 299
},
{
"epoch": 0.8522727272727273,
"grad_norm": 1.534882345079291,
"learning_rate": 9.851958060330186e-06,
"loss": 0.5037,
"step": 300
},
{
"epoch": 0.8551136363636364,
"grad_norm": 1.3965739101658592,
"learning_rate": 9.849553282839025e-06,
"loss": 0.4902,
"step": 301
},
{
"epoch": 0.8579545454545454,
"grad_norm": 1.5164723826063233,
"learning_rate": 9.847129429186662e-06,
"loss": 0.453,
"step": 302
},
{
"epoch": 0.8607954545454546,
"grad_norm": 1.4716195200624351,
"learning_rate": 9.844686508907538e-06,
"loss": 0.5214,
"step": 303
},
{
"epoch": 0.8636363636363636,
"grad_norm": 1.496443165367598,
"learning_rate": 9.842224531611106e-06,
"loss": 0.4291,
"step": 304
},
{
"epoch": 0.8664772727272727,
"grad_norm": 1.4268349606630073,
"learning_rate": 9.839743506981783e-06,
"loss": 0.5016,
"step": 305
},
{
"epoch": 0.8693181818181818,
"grad_norm": 1.552129131024092,
"learning_rate": 9.8372434447789e-06,
"loss": 0.499,
"step": 306
},
{
"epoch": 0.8721590909090909,
"grad_norm": 1.3993525226391348,
"learning_rate": 9.834724354836684e-06,
"loss": 0.5012,
"step": 307
},
{
"epoch": 0.875,
"grad_norm": 1.3911264167867943,
"learning_rate": 9.832186247064205e-06,
"loss": 0.5079,
"step": 308
},
{
"epoch": 0.8778409090909091,
"grad_norm": 1.4874539088146177,
"learning_rate": 9.829629131445342e-06,
"loss": 0.4655,
"step": 309
},
{
"epoch": 0.8806818181818182,
"grad_norm": 1.4484874853487535,
"learning_rate": 9.827053018038743e-06,
"loss": 0.447,
"step": 310
},
{
"epoch": 0.8835227272727273,
"grad_norm": 1.4169976323362639,
"learning_rate": 9.824457916977785e-06,
"loss": 0.5169,
"step": 311
},
{
"epoch": 0.8863636363636364,
"grad_norm": 1.5792992862826667,
"learning_rate": 9.821843838470536e-06,
"loss": 0.4499,
"step": 312
},
{
"epoch": 0.8892045454545454,
"grad_norm": 1.3935050163686387,
"learning_rate": 9.819210792799711e-06,
"loss": 0.4836,
"step": 313
},
{
"epoch": 0.8920454545454546,
"grad_norm": 1.560801731687215,
"learning_rate": 9.816558790322638e-06,
"loss": 0.4627,
"step": 314
},
{
"epoch": 0.8948863636363636,
"grad_norm": 1.8175856225601175,
"learning_rate": 9.81388784147121e-06,
"loss": 0.4842,
"step": 315
},
{
"epoch": 0.8977272727272727,
"grad_norm": 1.59068026179008,
"learning_rate": 9.811197956751851e-06,
"loss": 0.5668,
"step": 316
},
{
"epoch": 0.9005681818181818,
"grad_norm": 1.4641719221220586,
"learning_rate": 9.808489146745466e-06,
"loss": 0.4721,
"step": 317
},
{
"epoch": 0.9034090909090909,
"grad_norm": 1.433777355819498,
"learning_rate": 9.805761422107407e-06,
"loss": 0.4943,
"step": 318
},
{
"epoch": 0.90625,
"grad_norm": 1.5468925623745085,
"learning_rate": 9.803014793567429e-06,
"loss": 0.4907,
"step": 319
},
{
"epoch": 0.9090909090909091,
"grad_norm": 1.581396450996295,
"learning_rate": 9.800249271929645e-06,
"loss": 0.4494,
"step": 320
},
{
"epoch": 0.9119318181818182,
"grad_norm": 1.4624512226735435,
"learning_rate": 9.797464868072489e-06,
"loss": 0.4803,
"step": 321
},
{
"epoch": 0.9147727272727273,
"grad_norm": 1.431526222855722,
"learning_rate": 9.794661592948665e-06,
"loss": 0.4698,
"step": 322
},
{
"epoch": 0.9176136363636364,
"grad_norm": 1.561310299668803,
"learning_rate": 9.791839457585118e-06,
"loss": 0.5117,
"step": 323
},
{
"epoch": 0.9204545454545454,
"grad_norm": 1.625460220250549,
"learning_rate": 9.788998473082968e-06,
"loss": 0.4971,
"step": 324
},
{
"epoch": 0.9232954545454546,
"grad_norm": 1.6121422574836697,
"learning_rate": 9.786138650617494e-06,
"loss": 0.5365,
"step": 325
},
{
"epoch": 0.9261363636363636,
"grad_norm": 1.502479264528449,
"learning_rate": 9.783260001438066e-06,
"loss": 0.4695,
"step": 326
},
{
"epoch": 0.9289772727272727,
"grad_norm": 1.505956180933774,
"learning_rate": 9.780362536868113e-06,
"loss": 0.4844,
"step": 327
},
{
"epoch": 0.9318181818181818,
"grad_norm": 1.6301168834074384,
"learning_rate": 9.777446268305079e-06,
"loss": 0.4709,
"step": 328
},
{
"epoch": 0.9346590909090909,
"grad_norm": 1.5755579003886004,
"learning_rate": 9.774511207220369e-06,
"loss": 0.5236,
"step": 329
},
{
"epoch": 0.9375,
"grad_norm": 1.6499540937400865,
"learning_rate": 9.77155736515932e-06,
"loss": 0.5175,
"step": 330
},
{
"epoch": 0.9403409090909091,
"grad_norm": 1.5960557618257305,
"learning_rate": 9.768584753741134e-06,
"loss": 0.498,
"step": 331
},
{
"epoch": 0.9431818181818182,
"grad_norm": 1.5941569372324007,
"learning_rate": 9.765593384658855e-06,
"loss": 0.4732,
"step": 332
},
{
"epoch": 0.9460227272727273,
"grad_norm": 1.5228325929762483,
"learning_rate": 9.762583269679304e-06,
"loss": 0.4431,
"step": 333
},
{
"epoch": 0.9488636363636364,
"grad_norm": 1.490741833018044,
"learning_rate": 9.759554420643043e-06,
"loss": 0.5188,
"step": 334
},
{
"epoch": 0.9517045454545454,
"grad_norm": 1.6002668286658763,
"learning_rate": 9.756506849464327e-06,
"loss": 0.4839,
"step": 335
},
{
"epoch": 0.9545454545454546,
"grad_norm": 1.4598244710308395,
"learning_rate": 9.753440568131056e-06,
"loss": 0.4926,
"step": 336
},
{
"epoch": 0.9573863636363636,
"grad_norm": 1.4532786929990509,
"learning_rate": 9.750355588704728e-06,
"loss": 0.509,
"step": 337
},
{
"epoch": 0.9602272727272727,
"grad_norm": 1.6104333498186194,
"learning_rate": 9.74725192332039e-06,
"loss": 0.4843,
"step": 338
},
{
"epoch": 0.9630681818181818,
"grad_norm": 1.5091072452463585,
"learning_rate": 9.744129584186599e-06,
"loss": 0.5373,
"step": 339
},
{
"epoch": 0.9659090909090909,
"grad_norm": 1.3854787912992945,
"learning_rate": 9.740988583585356e-06,
"loss": 0.4462,
"step": 340
},
{
"epoch": 0.96875,
"grad_norm": 1.3149781820395967,
"learning_rate": 9.737828933872076e-06,
"loss": 0.5111,
"step": 341
},
{
"epoch": 0.9715909090909091,
"grad_norm": 1.4869043041061063,
"learning_rate": 9.73465064747553e-06,
"loss": 0.4843,
"step": 342
},
{
"epoch": 0.9744318181818182,
"grad_norm": 1.4416568690760034,
"learning_rate": 9.731453736897796e-06,
"loss": 0.4658,
"step": 343
},
{
"epoch": 0.9772727272727273,
"grad_norm": 1.5490848888116837,
"learning_rate": 9.72823821471422e-06,
"loss": 0.4676,
"step": 344
},
{
"epoch": 0.9801136363636364,
"grad_norm": 1.3810890147044554,
"learning_rate": 9.725004093573343e-06,
"loss": 0.495,
"step": 345
},
{
"epoch": 0.9829545454545454,
"grad_norm": 1.5581162695244781,
"learning_rate": 9.721751386196885e-06,
"loss": 0.5413,
"step": 346
},
{
"epoch": 0.9857954545454546,
"grad_norm": 1.5331178001377477,
"learning_rate": 9.718480105379663e-06,
"loss": 0.4913,
"step": 347
},
{
"epoch": 0.9886363636363636,
"grad_norm": 1.3200748312929138,
"learning_rate": 9.715190263989562e-06,
"loss": 0.471,
"step": 348
},
{
"epoch": 0.9914772727272727,
"grad_norm": 1.3932172431217436,
"learning_rate": 9.711881874967471e-06,
"loss": 0.4746,
"step": 349
},
{
"epoch": 0.9943181818181818,
"grad_norm": 1.596928748921533,
"learning_rate": 9.708554951327243e-06,
"loss": 0.496,
"step": 350
},
{
"epoch": 0.9971590909090909,
"grad_norm": 1.5042003988892965,
"learning_rate": 9.705209506155635e-06,
"loss": 0.5038,
"step": 351
},
{
"epoch": 1.0,
"grad_norm": 1.34623021375936,
"learning_rate": 9.701845552612261e-06,
"loss": 0.4584,
"step": 352
},
{
"epoch": 1.0028409090909092,
"grad_norm": 1.3935683216098373,
"learning_rate": 9.698463103929542e-06,
"loss": 0.416,
"step": 353
},
{
"epoch": 1.0056818181818181,
"grad_norm": 1.2572279971528715,
"learning_rate": 9.695062173412648e-06,
"loss": 0.3841,
"step": 354
},
{
"epoch": 1.0085227272727273,
"grad_norm": 1.331784432855466,
"learning_rate": 9.69164277443945e-06,
"loss": 0.411,
"step": 355
},
{
"epoch": 1.0113636363636365,
"grad_norm": 1.3861844220787067,
"learning_rate": 9.688204920460467e-06,
"loss": 0.4348,
"step": 356
},
{
"epoch": 1.0142045454545454,
"grad_norm": 1.3709086922612779,
"learning_rate": 9.68474862499881e-06,
"loss": 0.4243,
"step": 357
},
{
"epoch": 1.0170454545454546,
"grad_norm": 1.4954332771145582,
"learning_rate": 9.681273901650134e-06,
"loss": 0.3301,
"step": 358
},
{
"epoch": 1.0198863636363635,
"grad_norm": 1.5628318749217331,
"learning_rate": 9.677780764082583e-06,
"loss": 0.3853,
"step": 359
},
{
"epoch": 1.0227272727272727,
"grad_norm": 1.4017875812891134,
"learning_rate": 9.67426922603673e-06,
"loss": 0.4367,
"step": 360
},
{
"epoch": 1.0255681818181819,
"grad_norm": 1.5499407509600363,
"learning_rate": 9.670739301325534e-06,
"loss": 0.4372,
"step": 361
},
{
"epoch": 1.0284090909090908,
"grad_norm": 1.3221335169217352,
"learning_rate": 9.667191003834276e-06,
"loss": 0.3944,
"step": 362
},
{
"epoch": 1.03125,
"grad_norm": 1.2928319666303545,
"learning_rate": 9.663624347520506e-06,
"loss": 0.3913,
"step": 363
},
{
"epoch": 1.0340909090909092,
"grad_norm": 1.3237996792052562,
"learning_rate": 9.660039346413994e-06,
"loss": 0.3677,
"step": 364
},
{
"epoch": 1.0369318181818181,
"grad_norm": 1.356408795095918,
"learning_rate": 9.65643601461667e-06,
"loss": 0.3683,
"step": 365
},
{
"epoch": 1.0397727272727273,
"grad_norm": 1.3648543824708266,
"learning_rate": 9.65281436630257e-06,
"loss": 0.4299,
"step": 366
},
{
"epoch": 1.0426136363636365,
"grad_norm": 1.306204197525364,
"learning_rate": 9.649174415717776e-06,
"loss": 0.3814,
"step": 367
},
{
"epoch": 1.0454545454545454,
"grad_norm": 1.378237936287675,
"learning_rate": 9.64551617718037e-06,
"loss": 0.3856,
"step": 368
},
{
"epoch": 1.0482954545454546,
"grad_norm": 1.347725144403338,
"learning_rate": 9.641839665080363e-06,
"loss": 0.3868,
"step": 369
},
{
"epoch": 1.0511363636363635,
"grad_norm": 1.2789287048030384,
"learning_rate": 9.638144893879657e-06,
"loss": 0.4427,
"step": 370
},
{
"epoch": 1.0539772727272727,
"grad_norm": 1.4122383248910728,
"learning_rate": 9.634431878111969e-06,
"loss": 0.372,
"step": 371
},
{
"epoch": 1.0568181818181819,
"grad_norm": 1.382980088005404,
"learning_rate": 9.630700632382787e-06,
"loss": 0.398,
"step": 372
},
{
"epoch": 1.0596590909090908,
"grad_norm": 1.4228786699415132,
"learning_rate": 9.626951171369306e-06,
"loss": 0.4086,
"step": 373
},
{
"epoch": 1.0625,
"grad_norm": 1.2951379080398953,
"learning_rate": 9.623183509820376e-06,
"loss": 0.4102,
"step": 374
},
{
"epoch": 1.0653409090909092,
"grad_norm": 1.3372702622351302,
"learning_rate": 9.619397662556434e-06,
"loss": 0.387,
"step": 375
},
{
"epoch": 1.0681818181818181,
"grad_norm": 1.4961377119399946,
"learning_rate": 9.61559364446946e-06,
"loss": 0.3735,
"step": 376
},
{
"epoch": 1.0710227272727273,
"grad_norm": 1.4498025423293337,
"learning_rate": 9.611771470522908e-06,
"loss": 0.4034,
"step": 377
},
{
"epoch": 1.0738636363636365,
"grad_norm": 1.3626788600371367,
"learning_rate": 9.607931155751646e-06,
"loss": 0.321,
"step": 378
},
{
"epoch": 1.0767045454545454,
"grad_norm": 1.4483349711836415,
"learning_rate": 9.604072715261902e-06,
"loss": 0.3996,
"step": 379
},
{
"epoch": 1.0795454545454546,
"grad_norm": 1.4328248222967257,
"learning_rate": 9.600196164231209e-06,
"loss": 0.4367,
"step": 380
},
{
"epoch": 1.0823863636363635,
"grad_norm": 1.5892165025198344,
"learning_rate": 9.596301517908329e-06,
"loss": 0.3816,
"step": 381
},
{
"epoch": 1.0852272727272727,
"grad_norm": 1.4702034999304534,
"learning_rate": 9.592388791613215e-06,
"loss": 0.3685,
"step": 382
},
{
"epoch": 1.0880681818181819,
"grad_norm": 1.4201126245689717,
"learning_rate": 9.588458000736929e-06,
"loss": 0.4035,
"step": 383
},
{
"epoch": 1.0909090909090908,
"grad_norm": 1.4451056473536599,
"learning_rate": 9.584509160741599e-06,
"loss": 0.3979,
"step": 384
},
{
"epoch": 1.09375,
"grad_norm": 1.4239268094337205,
"learning_rate": 9.580542287160348e-06,
"loss": 0.4152,
"step": 385
},
{
"epoch": 1.0965909090909092,
"grad_norm": 1.3727180391928673,
"learning_rate": 9.576557395597237e-06,
"loss": 0.351,
"step": 386
},
{
"epoch": 1.0994318181818181,
"grad_norm": 1.3764065056655754,
"learning_rate": 9.572554501727198e-06,
"loss": 0.3993,
"step": 387
},
{
"epoch": 1.1022727272727273,
"grad_norm": 1.350149088431701,
"learning_rate": 9.568533621295982e-06,
"loss": 0.3802,
"step": 388
},
{
"epoch": 1.1051136363636365,
"grad_norm": 1.3494913492931555,
"learning_rate": 9.564494770120089e-06,
"loss": 0.3976,
"step": 389
},
{
"epoch": 1.1079545454545454,
"grad_norm": 1.2895978086734443,
"learning_rate": 9.560437964086713e-06,
"loss": 0.3737,
"step": 390
},
{
"epoch": 1.1107954545454546,
"grad_norm": 1.3580957096749768,
"learning_rate": 9.556363219153664e-06,
"loss": 0.4237,
"step": 391
},
{
"epoch": 1.1136363636363635,
"grad_norm": 1.448759353025191,
"learning_rate": 9.55227055134933e-06,
"loss": 0.365,
"step": 392
},
{
"epoch": 1.1164772727272727,
"grad_norm": 1.4090875794073592,
"learning_rate": 9.548159976772593e-06,
"loss": 0.3352,
"step": 393
},
{
"epoch": 1.1193181818181819,
"grad_norm": 1.318489892775961,
"learning_rate": 9.544031511592772e-06,
"loss": 0.3801,
"step": 394
},
{
"epoch": 1.1221590909090908,
"grad_norm": 1.482014367782798,
"learning_rate": 9.539885172049563e-06,
"loss": 0.4543,
"step": 395
},
{
"epoch": 1.125,
"grad_norm": 1.5905204206786512,
"learning_rate": 9.535720974452973e-06,
"loss": 0.3749,
"step": 396
},
{
"epoch": 1.1278409090909092,
"grad_norm": 1.4661858452157561,
"learning_rate": 9.531538935183252e-06,
"loss": 0.3671,
"step": 397
},
{
"epoch": 1.1306818181818181,
"grad_norm": 1.4153932607502078,
"learning_rate": 9.527339070690833e-06,
"loss": 0.4444,
"step": 398
},
{
"epoch": 1.1335227272727273,
"grad_norm": 1.2966945431847525,
"learning_rate": 9.52312139749627e-06,
"loss": 0.4391,
"step": 399
},
{
"epoch": 1.1363636363636362,
"grad_norm": 1.288707482023972,
"learning_rate": 9.518885932190166e-06,
"loss": 0.3696,
"step": 400
},
{
"epoch": 1.1392045454545454,
"grad_norm": 1.3246404279367519,
"learning_rate": 9.514632691433108e-06,
"loss": 0.3339,
"step": 401
},
{
"epoch": 1.1420454545454546,
"grad_norm": 1.274941281826731,
"learning_rate": 9.510361691955607e-06,
"loss": 0.3902,
"step": 402
},
{
"epoch": 1.1448863636363638,
"grad_norm": 1.2920978892355353,
"learning_rate": 9.506072950558036e-06,
"loss": 0.4159,
"step": 403
},
{
"epoch": 1.1477272727272727,
"grad_norm": 1.44258338460387,
"learning_rate": 9.501766484110546e-06,
"loss": 0.4162,
"step": 404
},
{
"epoch": 1.1505681818181819,
"grad_norm": 1.3556175583934928,
"learning_rate": 9.497442309553017e-06,
"loss": 0.4682,
"step": 405
},
{
"epoch": 1.1534090909090908,
"grad_norm": 1.4681216384949332,
"learning_rate": 9.493100443894986e-06,
"loss": 0.3963,
"step": 406
},
{
"epoch": 1.15625,
"grad_norm": 1.5449635342485868,
"learning_rate": 9.488740904215578e-06,
"loss": 0.3845,
"step": 407
},
{
"epoch": 1.1590909090909092,
"grad_norm": 1.4235183492577208,
"learning_rate": 9.484363707663443e-06,
"loss": 0.3596,
"step": 408
},
{
"epoch": 1.1619318181818181,
"grad_norm": 1.3972675999686617,
"learning_rate": 9.47996887145668e-06,
"loss": 0.3598,
"step": 409
},
{
"epoch": 1.1647727272727273,
"grad_norm": 1.3050109631890814,
"learning_rate": 9.475556412882782e-06,
"loss": 0.3975,
"step": 410
},
{
"epoch": 1.1676136363636362,
"grad_norm": 1.3019015711552302,
"learning_rate": 9.471126349298557e-06,
"loss": 0.3778,
"step": 411
},
{
"epoch": 1.1704545454545454,
"grad_norm": 1.3752118885869313,
"learning_rate": 9.466678698130064e-06,
"loss": 0.3948,
"step": 412
},
{
"epoch": 1.1732954545454546,
"grad_norm": 1.4120612953299163,
"learning_rate": 9.46221347687255e-06,
"loss": 0.4006,
"step": 413
},
{
"epoch": 1.1761363636363638,
"grad_norm": 1.3644262325212158,
"learning_rate": 9.457730703090367e-06,
"loss": 0.45,
"step": 414
},
{
"epoch": 1.1789772727272727,
"grad_norm": 1.4024791684682305,
"learning_rate": 9.453230394416914e-06,
"loss": 0.4321,
"step": 415
},
{
"epoch": 1.1818181818181819,
"grad_norm": 1.2518931861431282,
"learning_rate": 9.448712568554572e-06,
"loss": 0.4311,
"step": 416
},
{
"epoch": 1.1846590909090908,
"grad_norm": 1.3151977021290406,
"learning_rate": 9.444177243274619e-06,
"loss": 0.4405,
"step": 417
},
{
"epoch": 1.1875,
"grad_norm": 1.2870366213234796,
"learning_rate": 9.43962443641717e-06,
"loss": 0.3946,
"step": 418
},
{
"epoch": 1.1903409090909092,
"grad_norm": 1.2555358317281697,
"learning_rate": 9.43505416589111e-06,
"loss": 0.3904,
"step": 419
},
{
"epoch": 1.1931818181818181,
"grad_norm": 1.4290322380534968,
"learning_rate": 9.430466449674014e-06,
"loss": 0.4199,
"step": 420
},
{
"epoch": 1.1960227272727273,
"grad_norm": 1.3340923689952158,
"learning_rate": 9.425861305812083e-06,
"loss": 0.4016,
"step": 421
},
{
"epoch": 1.1988636363636362,
"grad_norm": 1.3506050543273165,
"learning_rate": 9.421238752420075e-06,
"loss": 0.3591,
"step": 422
},
{
"epoch": 1.2017045454545454,
"grad_norm": 1.3408737454429664,
"learning_rate": 9.416598807681221e-06,
"loss": 0.3958,
"step": 423
},
{
"epoch": 1.2045454545454546,
"grad_norm": 1.3066485596939463,
"learning_rate": 9.411941489847172e-06,
"loss": 0.3712,
"step": 424
},
{
"epoch": 1.2073863636363638,
"grad_norm": 1.386076628663598,
"learning_rate": 9.40726681723791e-06,
"loss": 0.363,
"step": 425
},
{
"epoch": 1.2102272727272727,
"grad_norm": 1.4001603892788743,
"learning_rate": 9.402574808241692e-06,
"loss": 0.3958,
"step": 426
},
{
"epoch": 1.2130681818181819,
"grad_norm": 1.5442221898856994,
"learning_rate": 9.397865481314959e-06,
"loss": 0.3936,
"step": 427
},
{
"epoch": 1.2159090909090908,
"grad_norm": 1.5030113636563918,
"learning_rate": 9.393138854982283e-06,
"loss": 0.4011,
"step": 428
},
{
"epoch": 1.21875,
"grad_norm": 1.437402205187118,
"learning_rate": 9.388394947836278e-06,
"loss": 0.4231,
"step": 429
},
{
"epoch": 1.2215909090909092,
"grad_norm": 1.395930365479985,
"learning_rate": 9.38363377853754e-06,
"loss": 0.4078,
"step": 430
},
{
"epoch": 1.2244318181818181,
"grad_norm": 1.4691665996144123,
"learning_rate": 9.37885536581456e-06,
"loss": 0.4,
"step": 431
},
{
"epoch": 1.2272727272727273,
"grad_norm": 1.3654497134259025,
"learning_rate": 9.374059728463663e-06,
"loss": 0.4093,
"step": 432
},
{
"epoch": 1.2301136363636362,
"grad_norm": 1.4295062613690053,
"learning_rate": 9.369246885348926e-06,
"loss": 0.384,
"step": 433
},
{
"epoch": 1.2329545454545454,
"grad_norm": 1.2387775411886048,
"learning_rate": 9.364416855402107e-06,
"loss": 0.3577,
"step": 434
},
{
"epoch": 1.2357954545454546,
"grad_norm": 1.3595503473660588,
"learning_rate": 9.359569657622573e-06,
"loss": 0.3957,
"step": 435
},
{
"epoch": 1.2386363636363638,
"grad_norm": 1.367378341936646,
"learning_rate": 9.35470531107722e-06,
"loss": 0.4374,
"step": 436
},
{
"epoch": 1.2414772727272727,
"grad_norm": 1.3007672389688,
"learning_rate": 9.349823834900396e-06,
"loss": 0.3906,
"step": 437
},
{
"epoch": 1.2443181818181819,
"grad_norm": 1.4358136508434372,
"learning_rate": 9.344925248293837e-06,
"loss": 0.3587,
"step": 438
},
{
"epoch": 1.2471590909090908,
"grad_norm": 1.3487005026003476,
"learning_rate": 9.340009570526578e-06,
"loss": 0.3957,
"step": 439
},
{
"epoch": 1.25,
"grad_norm": 1.3187450315078482,
"learning_rate": 9.335076820934889e-06,
"loss": 0.3913,
"step": 440
},
{
"epoch": 1.2528409090909092,
"grad_norm": 1.4046469997918478,
"learning_rate": 9.330127018922195e-06,
"loss": 0.3846,
"step": 441
},
{
"epoch": 1.2556818181818181,
"grad_norm": 1.3725821672416023,
"learning_rate": 9.325160183958987e-06,
"loss": 0.4077,
"step": 442
},
{
"epoch": 1.2585227272727273,
"grad_norm": 1.4084968261340196,
"learning_rate": 9.320176335582772e-06,
"loss": 0.3949,
"step": 443
},
{
"epoch": 1.2613636363636362,
"grad_norm": 1.4217514555447899,
"learning_rate": 9.315175493397968e-06,
"loss": 0.466,
"step": 444
},
{
"epoch": 1.2642045454545454,
"grad_norm": 1.378651964217461,
"learning_rate": 9.310157677075847e-06,
"loss": 0.3997,
"step": 445
},
{
"epoch": 1.2670454545454546,
"grad_norm": 1.4196574298188969,
"learning_rate": 9.30512290635445e-06,
"loss": 0.3745,
"step": 446
},
{
"epoch": 1.2698863636363638,
"grad_norm": 1.3558455660245554,
"learning_rate": 9.300071201038503e-06,
"loss": 0.4164,
"step": 447
},
{
"epoch": 1.2727272727272727,
"grad_norm": 1.3784613104346528,
"learning_rate": 9.295002580999355e-06,
"loss": 0.3385,
"step": 448
},
{
"epoch": 1.2755681818181819,
"grad_norm": 1.4749483610049798,
"learning_rate": 9.289917066174887e-06,
"loss": 0.389,
"step": 449
},
{
"epoch": 1.2784090909090908,
"grad_norm": 1.394529342297257,
"learning_rate": 9.284814676569431e-06,
"loss": 0.4202,
"step": 450
},
{
"epoch": 1.28125,
"grad_norm": 1.5150361682026248,
"learning_rate": 9.27969543225371e-06,
"loss": 0.4391,
"step": 451
},
{
"epoch": 1.2840909090909092,
"grad_norm": 1.2439757451957247,
"learning_rate": 9.274559353364734e-06,
"loss": 0.3674,
"step": 452
},
{
"epoch": 1.2869318181818181,
"grad_norm": 1.3338341217959293,
"learning_rate": 9.269406460105742e-06,
"loss": 0.4285,
"step": 453
},
{
"epoch": 1.2897727272727273,
"grad_norm": 1.348464897084427,
"learning_rate": 9.264236772746108e-06,
"loss": 0.3749,
"step": 454
},
{
"epoch": 1.2926136363636362,
"grad_norm": 1.532977524300145,
"learning_rate": 9.259050311621274e-06,
"loss": 0.3606,
"step": 455
},
{
"epoch": 1.2954545454545454,
"grad_norm": 1.2025844811833046,
"learning_rate": 9.253847097132656e-06,
"loss": 0.4191,
"step": 456
},
{
"epoch": 1.2982954545454546,
"grad_norm": 1.2628021212775675,
"learning_rate": 9.248627149747573e-06,
"loss": 0.3696,
"step": 457
},
{
"epoch": 1.3011363636363638,
"grad_norm": 1.424313945946166,
"learning_rate": 9.243390489999166e-06,
"loss": 0.3776,
"step": 458
},
{
"epoch": 1.3039772727272727,
"grad_norm": 1.4193614387826192,
"learning_rate": 9.238137138486318e-06,
"loss": 0.4468,
"step": 459
},
{
"epoch": 1.3068181818181819,
"grad_norm": 1.5337967438868354,
"learning_rate": 9.232867115873566e-06,
"loss": 0.4199,
"step": 460
},
{
"epoch": 1.3096590909090908,
"grad_norm": 1.4604547826541918,
"learning_rate": 9.227580442891022e-06,
"loss": 0.4003,
"step": 461
},
{
"epoch": 1.3125,
"grad_norm": 1.3483447354558256,
"learning_rate": 9.222277140334301e-06,
"loss": 0.3692,
"step": 462
},
{
"epoch": 1.3153409090909092,
"grad_norm": 1.3176534368365396,
"learning_rate": 9.21695722906443e-06,
"loss": 0.4082,
"step": 463
},
{
"epoch": 1.3181818181818181,
"grad_norm": 1.3848734791606088,
"learning_rate": 9.211620730007763e-06,
"loss": 0.4134,
"step": 464
},
{
"epoch": 1.3210227272727273,
"grad_norm": 1.3812780788636116,
"learning_rate": 9.206267664155906e-06,
"loss": 0.3784,
"step": 465
},
{
"epoch": 1.3238636363636362,
"grad_norm": 1.2963867485593161,
"learning_rate": 9.200898052565638e-06,
"loss": 0.388,
"step": 466
},
{
"epoch": 1.3267045454545454,
"grad_norm": 1.42585767936649,
"learning_rate": 9.195511916358813e-06,
"loss": 0.3985,
"step": 467
},
{
"epoch": 1.3295454545454546,
"grad_norm": 1.311283985032005,
"learning_rate": 9.19010927672229e-06,
"loss": 0.3719,
"step": 468
},
{
"epoch": 1.3323863636363638,
"grad_norm": 1.297472323865573,
"learning_rate": 9.18469015490785e-06,
"loss": 0.4201,
"step": 469
},
{
"epoch": 1.3352272727272727,
"grad_norm": 1.2429332273904634,
"learning_rate": 9.1792545722321e-06,
"loss": 0.3472,
"step": 470
},
{
"epoch": 1.3380681818181819,
"grad_norm": 1.3735871193712135,
"learning_rate": 9.173802550076402e-06,
"loss": 0.3834,
"step": 471
},
{
"epoch": 1.3409090909090908,
"grad_norm": 1.1849332008114133,
"learning_rate": 9.168334109886785e-06,
"loss": 0.4078,
"step": 472
},
{
"epoch": 1.34375,
"grad_norm": 1.2435873213180686,
"learning_rate": 9.162849273173857e-06,
"loss": 0.4132,
"step": 473
},
{
"epoch": 1.3465909090909092,
"grad_norm": 1.3880977737093316,
"learning_rate": 9.157348061512728e-06,
"loss": 0.3989,
"step": 474
},
{
"epoch": 1.3494318181818181,
"grad_norm": 1.162608532025314,
"learning_rate": 9.151830496542912e-06,
"loss": 0.3894,
"step": 475
},
{
"epoch": 1.3522727272727273,
"grad_norm": 1.3820687112561636,
"learning_rate": 9.146296599968258e-06,
"loss": 0.3629,
"step": 476
},
{
"epoch": 1.3551136363636362,
"grad_norm": 1.235383479146175,
"learning_rate": 9.140746393556853e-06,
"loss": 0.3896,
"step": 477
},
{
"epoch": 1.3579545454545454,
"grad_norm": 1.349928335272851,
"learning_rate": 9.135179899140947e-06,
"loss": 0.4773,
"step": 478
},
{
"epoch": 1.3607954545454546,
"grad_norm": 1.2613773914812836,
"learning_rate": 9.129597138616845e-06,
"loss": 0.3953,
"step": 479
},
{
"epoch": 1.3636363636363638,
"grad_norm": 1.3516210026791848,
"learning_rate": 9.123998133944854e-06,
"loss": 0.3954,
"step": 480
},
{
"epoch": 1.3664772727272727,
"grad_norm": 1.4675800730662938,
"learning_rate": 9.118382907149164e-06,
"loss": 0.3922,
"step": 481
},
{
"epoch": 1.3693181818181819,
"grad_norm": 1.3686097193464257,
"learning_rate": 9.11275148031779e-06,
"loss": 0.4088,
"step": 482
},
{
"epoch": 1.3721590909090908,
"grad_norm": 1.3124849147092428,
"learning_rate": 9.107103875602458e-06,
"loss": 0.4436,
"step": 483
},
{
"epoch": 1.375,
"grad_norm": 1.435628812289037,
"learning_rate": 9.101440115218543e-06,
"loss": 0.3658,
"step": 484
},
{
"epoch": 1.3778409090909092,
"grad_norm": 1.2899778976362926,
"learning_rate": 9.09576022144496e-06,
"loss": 0.4207,
"step": 485
},
{
"epoch": 1.3806818181818181,
"grad_norm": 1.4371410340359985,
"learning_rate": 9.090064216624093e-06,
"loss": 0.3798,
"step": 486
},
{
"epoch": 1.3835227272727273,
"grad_norm": 1.2938067062261525,
"learning_rate": 9.084352123161695e-06,
"loss": 0.3966,
"step": 487
},
{
"epoch": 1.3863636363636362,
"grad_norm": 1.3681448699118202,
"learning_rate": 9.078623963526811e-06,
"loss": 0.4305,
"step": 488
},
{
"epoch": 1.3892045454545454,
"grad_norm": 1.2589613682277516,
"learning_rate": 9.07287976025168e-06,
"loss": 0.3968,
"step": 489
},
{
"epoch": 1.3920454545454546,
"grad_norm": 1.3938984533537444,
"learning_rate": 9.067119535931648e-06,
"loss": 0.382,
"step": 490
},
{
"epoch": 1.3948863636363638,
"grad_norm": 1.2856128305972496,
"learning_rate": 9.061343313225088e-06,
"loss": 0.4349,
"step": 491
},
{
"epoch": 1.3977272727272727,
"grad_norm": 1.3125963539926566,
"learning_rate": 9.055551114853296e-06,
"loss": 0.44,
"step": 492
},
{
"epoch": 1.4005681818181819,
"grad_norm": 1.3955500448358946,
"learning_rate": 9.04974296360042e-06,
"loss": 0.4091,
"step": 493
},
{
"epoch": 1.4034090909090908,
"grad_norm": 1.428413178347213,
"learning_rate": 9.043918882313344e-06,
"loss": 0.4271,
"step": 494
},
{
"epoch": 1.40625,
"grad_norm": 1.2796124312236525,
"learning_rate": 9.038078893901634e-06,
"loss": 0.4278,
"step": 495
},
{
"epoch": 1.4090909090909092,
"grad_norm": 1.2349198829939891,
"learning_rate": 9.032223021337415e-06,
"loss": 0.3759,
"step": 496
},
{
"epoch": 1.4119318181818181,
"grad_norm": 1.3595968193610075,
"learning_rate": 9.026351287655294e-06,
"loss": 0.4327,
"step": 497
},
{
"epoch": 1.4147727272727273,
"grad_norm": 1.5092161593843314,
"learning_rate": 9.020463715952276e-06,
"loss": 0.4251,
"step": 498
},
{
"epoch": 1.4176136363636362,
"grad_norm": 1.2943083527048878,
"learning_rate": 9.014560329387661e-06,
"loss": 0.4012,
"step": 499
},
{
"epoch": 1.4204545454545454,
"grad_norm": 1.3181954379592955,
"learning_rate": 9.00864115118296e-06,
"loss": 0.4113,
"step": 500
},
{
"epoch": 1.4232954545454546,
"grad_norm": 1.2580904992249191,
"learning_rate": 9.002706204621802e-06,
"loss": 0.4157,
"step": 501
},
{
"epoch": 1.4261363636363638,
"grad_norm": 1.352479946463779,
"learning_rate": 8.996755513049844e-06,
"loss": 0.4393,
"step": 502
},
{
"epoch": 1.4289772727272727,
"grad_norm": 1.3576556234541397,
"learning_rate": 8.99078909987467e-06,
"loss": 0.3755,
"step": 503
},
{
"epoch": 1.4318181818181819,
"grad_norm": 1.230294030046518,
"learning_rate": 8.984806988565716e-06,
"loss": 0.3966,
"step": 504
},
{
"epoch": 1.4346590909090908,
"grad_norm": 1.272378129867815,
"learning_rate": 8.978809202654161e-06,
"loss": 0.3982,
"step": 505
},
{
"epoch": 1.4375,
"grad_norm": 1.4640507964444314,
"learning_rate": 8.972795765732847e-06,
"loss": 0.4077,
"step": 506
},
{
"epoch": 1.4403409090909092,
"grad_norm": 1.4122007567764803,
"learning_rate": 8.966766701456177e-06,
"loss": 0.376,
"step": 507
},
{
"epoch": 1.4431818181818181,
"grad_norm": 1.2543181499121292,
"learning_rate": 8.960722033540025e-06,
"loss": 0.3524,
"step": 508
},
{
"epoch": 1.4460227272727273,
"grad_norm": 1.3516761909069253,
"learning_rate": 8.954661785761648e-06,
"loss": 0.3809,
"step": 509
},
{
"epoch": 1.4488636363636362,
"grad_norm": 1.3738331770015468,
"learning_rate": 8.94858598195958e-06,
"loss": 0.4381,
"step": 510
},
{
"epoch": 1.4517045454545454,
"grad_norm": 1.3894759101552216,
"learning_rate": 8.942494646033555e-06,
"loss": 0.4007,
"step": 511
},
{
"epoch": 1.4545454545454546,
"grad_norm": 1.3813101187524204,
"learning_rate": 8.936387801944397e-06,
"loss": 0.4241,
"step": 512
},
{
"epoch": 1.4573863636363638,
"grad_norm": 1.234475792865503,
"learning_rate": 8.930265473713939e-06,
"loss": 0.3475,
"step": 513
},
{
"epoch": 1.4602272727272727,
"grad_norm": 1.340171681220909,
"learning_rate": 8.924127685424914e-06,
"loss": 0.3837,
"step": 514
},
{
"epoch": 1.4630681818181819,
"grad_norm": 1.611032851097366,
"learning_rate": 8.917974461220877e-06,
"loss": 0.4447,
"step": 515
},
{
"epoch": 1.4659090909090908,
"grad_norm": 1.4903364424496537,
"learning_rate": 8.911805825306097e-06,
"loss": 0.4018,
"step": 516
},
{
"epoch": 1.46875,
"grad_norm": 1.3377419588076225,
"learning_rate": 8.905621801945467e-06,
"loss": 0.3697,
"step": 517
},
{
"epoch": 1.4715909090909092,
"grad_norm": 1.2820938683900953,
"learning_rate": 8.899422415464409e-06,
"loss": 0.4119,
"step": 518
},
{
"epoch": 1.4744318181818181,
"grad_norm": 1.1786452967753045,
"learning_rate": 8.893207690248776e-06,
"loss": 0.4287,
"step": 519
},
{
"epoch": 1.4772727272727273,
"grad_norm": 1.3659687009025128,
"learning_rate": 8.88697765074476e-06,
"loss": 0.4042,
"step": 520
},
{
"epoch": 1.4801136363636362,
"grad_norm": 1.4456826342268947,
"learning_rate": 8.880732321458785e-06,
"loss": 0.4582,
"step": 521
},
{
"epoch": 1.4829545454545454,
"grad_norm": 1.3518179145479006,
"learning_rate": 8.87447172695743e-06,
"loss": 0.4094,
"step": 522
},
{
"epoch": 1.4857954545454546,
"grad_norm": 1.32516388869156,
"learning_rate": 8.868195891867315e-06,
"loss": 0.421,
"step": 523
},
{
"epoch": 1.4886363636363638,
"grad_norm": 1.4216865156200345,
"learning_rate": 8.86190484087501e-06,
"loss": 0.4337,
"step": 524
},
{
"epoch": 1.4914772727272727,
"grad_norm": 1.2427141581558576,
"learning_rate": 8.85559859872694e-06,
"loss": 0.3898,
"step": 525
},
{
"epoch": 1.4943181818181819,
"grad_norm": 1.3595678928667188,
"learning_rate": 8.849277190229284e-06,
"loss": 0.3784,
"step": 526
},
{
"epoch": 1.4971590909090908,
"grad_norm": 1.3554942914251902,
"learning_rate": 8.84294064024788e-06,
"loss": 0.4704,
"step": 527
},
{
"epoch": 1.5,
"grad_norm": 1.2506191801860325,
"learning_rate": 8.836588973708129e-06,
"loss": 0.4225,
"step": 528
},
{
"epoch": 1.5028409090909092,
"grad_norm": 1.272132348825335,
"learning_rate": 8.83022221559489e-06,
"loss": 0.4212,
"step": 529
},
{
"epoch": 1.5056818181818183,
"grad_norm": 1.2555892277001355,
"learning_rate": 8.82384039095239e-06,
"loss": 0.4531,
"step": 530
},
{
"epoch": 1.5085227272727273,
"grad_norm": 1.4108423232568015,
"learning_rate": 8.817443524884119e-06,
"loss": 0.3875,
"step": 531
},
{
"epoch": 1.5113636363636362,
"grad_norm": 1.5126815712719472,
"learning_rate": 8.811031642552732e-06,
"loss": 0.3704,
"step": 532
},
{
"epoch": 1.5142045454545454,
"grad_norm": 1.4219068937968729,
"learning_rate": 8.804604769179958e-06,
"loss": 0.3904,
"step": 533
},
{
"epoch": 1.5170454545454546,
"grad_norm": 1.3044529230830835,
"learning_rate": 8.798162930046488e-06,
"loss": 0.3762,
"step": 534
},
{
"epoch": 1.5198863636363638,
"grad_norm": 1.4796842484308415,
"learning_rate": 8.791706150491887e-06,
"loss": 0.3911,
"step": 535
},
{
"epoch": 1.5227272727272727,
"grad_norm": 1.4946259268999356,
"learning_rate": 8.78523445591449e-06,
"loss": 0.3734,
"step": 536
},
{
"epoch": 1.5255681818181817,
"grad_norm": 1.5186972153108664,
"learning_rate": 8.778747871771293e-06,
"loss": 0.419,
"step": 537
},
{
"epoch": 1.5284090909090908,
"grad_norm": 1.2896009388954741,
"learning_rate": 8.772246423577871e-06,
"loss": 0.435,
"step": 538
},
{
"epoch": 1.53125,
"grad_norm": 1.4805755697726184,
"learning_rate": 8.765730136908266e-06,
"loss": 0.3916,
"step": 539
},
{
"epoch": 1.5340909090909092,
"grad_norm": 1.4854240889161028,
"learning_rate": 8.759199037394888e-06,
"loss": 0.3848,
"step": 540
},
{
"epoch": 1.5369318181818183,
"grad_norm": 1.463681525472631,
"learning_rate": 8.752653150728412e-06,
"loss": 0.4095,
"step": 541
},
{
"epoch": 1.5397727272727273,
"grad_norm": 1.5820724211894255,
"learning_rate": 8.746092502657681e-06,
"loss": 0.4613,
"step": 542
},
{
"epoch": 1.5426136363636362,
"grad_norm": 1.505592006271489,
"learning_rate": 8.739517118989606e-06,
"loss": 0.3683,
"step": 543
},
{
"epoch": 1.5454545454545454,
"grad_norm": 1.3692299981481657,
"learning_rate": 8.732927025589058e-06,
"loss": 0.3586,
"step": 544
},
{
"epoch": 1.5482954545454546,
"grad_norm": 1.3878049908700243,
"learning_rate": 8.726322248378775e-06,
"loss": 0.3788,
"step": 545
},
{
"epoch": 1.5511363636363638,
"grad_norm": 1.3928801140937896,
"learning_rate": 8.719702813339248e-06,
"loss": 0.4042,
"step": 546
},
{
"epoch": 1.5539772727272727,
"grad_norm": 1.2922560147345834,
"learning_rate": 8.713068746508633e-06,
"loss": 0.4193,
"step": 547
},
{
"epoch": 1.5568181818181817,
"grad_norm": 1.4892914844476706,
"learning_rate": 8.706420073982636e-06,
"loss": 0.4617,
"step": 548
},
{
"epoch": 1.5596590909090908,
"grad_norm": 1.4717274089795425,
"learning_rate": 8.69975682191442e-06,
"loss": 0.3646,
"step": 549
},
{
"epoch": 1.5625,
"grad_norm": 1.4582367131460128,
"learning_rate": 8.693079016514497e-06,
"loss": 0.4224,
"step": 550
},
{
"epoch": 1.5653409090909092,
"grad_norm": 1.3700688358872546,
"learning_rate": 8.68638668405062e-06,
"loss": 0.3761,
"step": 551
},
{
"epoch": 1.5681818181818183,
"grad_norm": 1.303150019049142,
"learning_rate": 8.679679850847698e-06,
"loss": 0.3941,
"step": 552
},
{
"epoch": 1.5710227272727273,
"grad_norm": 1.3691241123764724,
"learning_rate": 8.672958543287666e-06,
"loss": 0.4003,
"step": 553
},
{
"epoch": 1.5738636363636362,
"grad_norm": 1.3220887396048668,
"learning_rate": 8.666222787809407e-06,
"loss": 0.4369,
"step": 554
},
{
"epoch": 1.5767045454545454,
"grad_norm": 1.3926991655468106,
"learning_rate": 8.659472610908628e-06,
"loss": 0.3942,
"step": 555
},
{
"epoch": 1.5795454545454546,
"grad_norm": 1.3537640175865195,
"learning_rate": 8.652708039137767e-06,
"loss": 0.3829,
"step": 556
},
{
"epoch": 1.5823863636363638,
"grad_norm": 1.3430604939036295,
"learning_rate": 8.645929099105886e-06,
"loss": 0.4007,
"step": 557
},
{
"epoch": 1.5852272727272727,
"grad_norm": 1.236166779871983,
"learning_rate": 8.639135817478566e-06,
"loss": 0.3855,
"step": 558
},
{
"epoch": 1.5880681818181817,
"grad_norm": 1.2858592765808123,
"learning_rate": 8.632328220977801e-06,
"loss": 0.3809,
"step": 559
},
{
"epoch": 1.5909090909090908,
"grad_norm": 1.346749375703671,
"learning_rate": 8.625506336381895e-06,
"loss": 0.3927,
"step": 560
},
{
"epoch": 1.59375,
"grad_norm": 1.3865417442370538,
"learning_rate": 8.61867019052535e-06,
"loss": 0.406,
"step": 561
},
{
"epoch": 1.5965909090909092,
"grad_norm": 1.3928703341415491,
"learning_rate": 8.611819810298778e-06,
"loss": 0.4348,
"step": 562
},
{
"epoch": 1.5994318181818183,
"grad_norm": 1.3346447546562574,
"learning_rate": 8.604955222648772e-06,
"loss": 0.3923,
"step": 563
},
{
"epoch": 1.6022727272727273,
"grad_norm": 1.3874781514983023,
"learning_rate": 8.598076454577815e-06,
"loss": 0.3811,
"step": 564
},
{
"epoch": 1.6051136363636362,
"grad_norm": 1.3892030370722708,
"learning_rate": 8.591183533144172e-06,
"loss": 0.4168,
"step": 565
},
{
"epoch": 1.6079545454545454,
"grad_norm": 1.39476614799105,
"learning_rate": 8.584276485461775e-06,
"loss": 0.3924,
"step": 566
},
{
"epoch": 1.6107954545454546,
"grad_norm": 1.4734200006566598,
"learning_rate": 8.577355338700133e-06,
"loss": 0.3808,
"step": 567
},
{
"epoch": 1.6136363636363638,
"grad_norm": 1.4118837014643815,
"learning_rate": 8.570420120084208e-06,
"loss": 0.456,
"step": 568
},
{
"epoch": 1.6164772727272727,
"grad_norm": 1.3035533880676229,
"learning_rate": 8.563470856894316e-06,
"loss": 0.4052,
"step": 569
},
{
"epoch": 1.6193181818181817,
"grad_norm": 1.2585997343091353,
"learning_rate": 8.556507576466017e-06,
"loss": 0.4055,
"step": 570
},
{
"epoch": 1.6221590909090908,
"grad_norm": 1.2827553058465992,
"learning_rate": 8.549530306190015e-06,
"loss": 0.4242,
"step": 571
},
{
"epoch": 1.625,
"grad_norm": 1.311108635707505,
"learning_rate": 8.542539073512038e-06,
"loss": 0.4325,
"step": 572
},
{
"epoch": 1.6278409090909092,
"grad_norm": 1.452722602140385,
"learning_rate": 8.535533905932739e-06,
"loss": 0.359,
"step": 573
},
{
"epoch": 1.6306818181818183,
"grad_norm": 1.4216842513339472,
"learning_rate": 8.528514831007587e-06,
"loss": 0.3865,
"step": 574
},
{
"epoch": 1.6335227272727273,
"grad_norm": 1.104281881309897,
"learning_rate": 8.521481876346751e-06,
"loss": 0.3927,
"step": 575
},
{
"epoch": 1.6363636363636362,
"grad_norm": 1.373540934205005,
"learning_rate": 8.514435069615005e-06,
"loss": 0.3735,
"step": 576
},
{
"epoch": 1.6392045454545454,
"grad_norm": 1.4721648721287588,
"learning_rate": 8.507374438531606e-06,
"loss": 0.4089,
"step": 577
},
{
"epoch": 1.6420454545454546,
"grad_norm": 1.4330241380765012,
"learning_rate": 8.500300010870195e-06,
"loss": 0.3962,
"step": 578
},
{
"epoch": 1.6448863636363638,
"grad_norm": 1.314258656388838,
"learning_rate": 8.493211814458674e-06,
"loss": 0.3902,
"step": 579
},
{
"epoch": 1.6477272727272727,
"grad_norm": 1.3180359191061783,
"learning_rate": 8.486109877179115e-06,
"loss": 0.4559,
"step": 580
},
{
"epoch": 1.6505681818181817,
"grad_norm": 1.2083828163470816,
"learning_rate": 8.478994226967638e-06,
"loss": 0.3928,
"step": 581
},
{
"epoch": 1.6534090909090908,
"grad_norm": 1.274567661010079,
"learning_rate": 8.471864891814304e-06,
"loss": 0.4143,
"step": 582
},
{
"epoch": 1.65625,
"grad_norm": 1.3144626541356723,
"learning_rate": 8.464721899763003e-06,
"loss": 0.3369,
"step": 583
},
{
"epoch": 1.6590909090909092,
"grad_norm": 1.3987801501809722,
"learning_rate": 8.457565278911349e-06,
"loss": 0.4459,
"step": 584
},
{
"epoch": 1.6619318181818183,
"grad_norm": 1.235004230998312,
"learning_rate": 8.450395057410561e-06,
"loss": 0.3851,
"step": 585
},
{
"epoch": 1.6647727272727273,
"grad_norm": 1.3390522796730047,
"learning_rate": 8.443211263465362e-06,
"loss": 0.3945,
"step": 586
},
{
"epoch": 1.6676136363636362,
"grad_norm": 1.4255845992278726,
"learning_rate": 8.436013925333868e-06,
"loss": 0.4216,
"step": 587
},
{
"epoch": 1.6704545454545454,
"grad_norm": 1.382760628657945,
"learning_rate": 8.42880307132746e-06,
"loss": 0.3993,
"step": 588
},
{
"epoch": 1.6732954545454546,
"grad_norm": 1.3285153601515391,
"learning_rate": 8.421578729810693e-06,
"loss": 0.4177,
"step": 589
},
{
"epoch": 1.6761363636363638,
"grad_norm": 1.2999965518088938,
"learning_rate": 8.414340929201175e-06,
"loss": 0.424,
"step": 590
},
{
"epoch": 1.6789772727272727,
"grad_norm": 1.2296769461458794,
"learning_rate": 8.407089697969458e-06,
"loss": 0.4055,
"step": 591
},
{
"epoch": 1.6818181818181817,
"grad_norm": 1.249775689774472,
"learning_rate": 8.39982506463892e-06,
"loss": 0.3783,
"step": 592
},
{
"epoch": 1.6846590909090908,
"grad_norm": 1.3164118726921976,
"learning_rate": 8.392547057785662e-06,
"loss": 0.4116,
"step": 593
},
{
"epoch": 1.6875,
"grad_norm": 1.3465959411917943,
"learning_rate": 8.38525570603839e-06,
"loss": 0.4228,
"step": 594
},
{
"epoch": 1.6903409090909092,
"grad_norm": 1.3714243038000036,
"learning_rate": 8.377951038078303e-06,
"loss": 0.4441,
"step": 595
},
{
"epoch": 1.6931818181818183,
"grad_norm": 1.334863296659028,
"learning_rate": 8.370633082638977e-06,
"loss": 0.3498,
"step": 596
},
{
"epoch": 1.6960227272727273,
"grad_norm": 1.3740648201371748,
"learning_rate": 8.363301868506264e-06,
"loss": 0.4077,
"step": 597
},
{
"epoch": 1.6988636363636362,
"grad_norm": 1.2784134213122538,
"learning_rate": 8.35595742451816e-06,
"loss": 0.412,
"step": 598
},
{
"epoch": 1.7017045454545454,
"grad_norm": 1.2139020970296652,
"learning_rate": 8.34859977956471e-06,
"loss": 0.4094,
"step": 599
},
{
"epoch": 1.7045454545454546,
"grad_norm": 1.3204896950697897,
"learning_rate": 8.341228962587881e-06,
"loss": 0.3752,
"step": 600
},
{
"epoch": 1.7073863636363638,
"grad_norm": 1.4708757740664673,
"learning_rate": 8.33384500258146e-06,
"loss": 0.389,
"step": 601
},
{
"epoch": 1.7102272727272727,
"grad_norm": 1.4266850812841863,
"learning_rate": 8.326447928590921e-06,
"loss": 0.4396,
"step": 602
},
{
"epoch": 1.7130681818181817,
"grad_norm": 1.1175427095357402,
"learning_rate": 8.319037769713338e-06,
"loss": 0.382,
"step": 603
},
{
"epoch": 1.7159090909090908,
"grad_norm": 1.281173570535696,
"learning_rate": 8.311614555097245e-06,
"loss": 0.4192,
"step": 604
},
{
"epoch": 1.71875,
"grad_norm": 1.240781935793036,
"learning_rate": 8.304178313942536e-06,
"loss": 0.3941,
"step": 605
},
{
"epoch": 1.7215909090909092,
"grad_norm": 1.3452354473216068,
"learning_rate": 8.296729075500345e-06,
"loss": 0.4161,
"step": 606
},
{
"epoch": 1.7244318181818183,
"grad_norm": 1.3547326876440298,
"learning_rate": 8.289266869072933e-06,
"loss": 0.4318,
"step": 607
},
{
"epoch": 1.7272727272727273,
"grad_norm": 1.3778793045074398,
"learning_rate": 8.281791724013571e-06,
"loss": 0.345,
"step": 608
},
{
"epoch": 1.7301136363636362,
"grad_norm": 1.4145733189261174,
"learning_rate": 8.274303669726427e-06,
"loss": 0.4236,
"step": 609
},
{
"epoch": 1.7329545454545454,
"grad_norm": 1.4088839074475497,
"learning_rate": 8.266802735666443e-06,
"loss": 0.4407,
"step": 610
},
{
"epoch": 1.7357954545454546,
"grad_norm": 1.3119328043721623,
"learning_rate": 8.259288951339233e-06,
"loss": 0.3939,
"step": 611
},
{
"epoch": 1.7386363636363638,
"grad_norm": 1.309429912330872,
"learning_rate": 8.251762346300954e-06,
"loss": 0.4109,
"step": 612
},
{
"epoch": 1.7414772727272727,
"grad_norm": 1.3345438738994535,
"learning_rate": 8.244222950158194e-06,
"loss": 0.4536,
"step": 613
},
{
"epoch": 1.7443181818181817,
"grad_norm": 1.453082925840765,
"learning_rate": 8.236670792567856e-06,
"loss": 0.4107,
"step": 614
},
{
"epoch": 1.7471590909090908,
"grad_norm": 1.32130004870712,
"learning_rate": 8.229105903237045e-06,
"loss": 0.3599,
"step": 615
},
{
"epoch": 1.75,
"grad_norm": 1.2225515703742833,
"learning_rate": 8.221528311922941e-06,
"loss": 0.4239,
"step": 616
},
{
"epoch": 1.7528409090909092,
"grad_norm": 1.250676696637379,
"learning_rate": 8.213938048432697e-06,
"loss": 0.378,
"step": 617
},
{
"epoch": 1.7556818181818183,
"grad_norm": 1.3753326745883003,
"learning_rate": 8.206335142623305e-06,
"loss": 0.4368,
"step": 618
},
{
"epoch": 1.7585227272727273,
"grad_norm": 1.4771926734091496,
"learning_rate": 8.198719624401493e-06,
"loss": 0.408,
"step": 619
},
{
"epoch": 1.7613636363636362,
"grad_norm": 1.3052916853608703,
"learning_rate": 8.191091523723594e-06,
"loss": 0.3869,
"step": 620
},
{
"epoch": 1.7642045454545454,
"grad_norm": 1.2079205005036262,
"learning_rate": 8.183450870595443e-06,
"loss": 0.3799,
"step": 621
},
{
"epoch": 1.7670454545454546,
"grad_norm": 1.3981335006171198,
"learning_rate": 8.175797695072245e-06,
"loss": 0.4104,
"step": 622
},
{
"epoch": 1.7698863636363638,
"grad_norm": 1.2549497514899004,
"learning_rate": 8.168132027258467e-06,
"loss": 0.3949,
"step": 623
},
{
"epoch": 1.7727272727272727,
"grad_norm": 1.1515091844850416,
"learning_rate": 8.160453897307714e-06,
"loss": 0.4306,
"step": 624
},
{
"epoch": 1.7755681818181817,
"grad_norm": 1.260271278484822,
"learning_rate": 8.152763335422612e-06,
"loss": 0.4018,
"step": 625
},
{
"epoch": 1.7784090909090908,
"grad_norm": 1.2456788115801498,
"learning_rate": 8.145060371854692e-06,
"loss": 0.4038,
"step": 626
},
{
"epoch": 1.78125,
"grad_norm": 1.3353759651701624,
"learning_rate": 8.13734503690426e-06,
"loss": 0.4053,
"step": 627
},
{
"epoch": 1.7840909090909092,
"grad_norm": 1.2141575196798886,
"learning_rate": 8.129617360920297e-06,
"loss": 0.374,
"step": 628
},
{
"epoch": 1.7869318181818183,
"grad_norm": 1.2568084197585532,
"learning_rate": 8.121877374300318e-06,
"loss": 0.3789,
"step": 629
},
{
"epoch": 1.7897727272727273,
"grad_norm": 1.3228386855112886,
"learning_rate": 8.11412510749027e-06,
"loss": 0.4189,
"step": 630
},
{
"epoch": 1.7926136363636362,
"grad_norm": 1.2450848434801394,
"learning_rate": 8.106360590984406e-06,
"loss": 0.4058,
"step": 631
},
{
"epoch": 1.7954545454545454,
"grad_norm": 1.2751134284940169,
"learning_rate": 8.098583855325157e-06,
"loss": 0.3609,
"step": 632
},
{
"epoch": 1.7982954545454546,
"grad_norm": 1.2613593262795375,
"learning_rate": 8.090794931103026e-06,
"loss": 0.4208,
"step": 633
},
{
"epoch": 1.8011363636363638,
"grad_norm": 1.3187892972599105,
"learning_rate": 8.08299384895646e-06,
"loss": 0.3644,
"step": 634
},
{
"epoch": 1.8039772727272727,
"grad_norm": 1.3412404440839794,
"learning_rate": 8.075180639571726e-06,
"loss": 0.3943,
"step": 635
},
{
"epoch": 1.8068181818181817,
"grad_norm": 1.3416612818936742,
"learning_rate": 8.067355333682799e-06,
"loss": 0.3722,
"step": 636
},
{
"epoch": 1.8096590909090908,
"grad_norm": 1.2215225081773167,
"learning_rate": 8.059517962071234e-06,
"loss": 0.373,
"step": 637
},
{
"epoch": 1.8125,
"grad_norm": 1.2968614196474024,
"learning_rate": 8.05166855556605e-06,
"loss": 0.3954,
"step": 638
},
{
"epoch": 1.8153409090909092,
"grad_norm": 1.2381952159486795,
"learning_rate": 8.043807145043604e-06,
"loss": 0.4333,
"step": 639
},
{
"epoch": 1.8181818181818183,
"grad_norm": 1.3448593265426114,
"learning_rate": 8.035933761427475e-06,
"loss": 0.3933,
"step": 640
},
{
"epoch": 1.8210227272727273,
"grad_norm": 1.3928700734438324,
"learning_rate": 8.028048435688333e-06,
"loss": 0.3611,
"step": 641
},
{
"epoch": 1.8238636363636362,
"grad_norm": 1.324880463233581,
"learning_rate": 8.020151198843833e-06,
"loss": 0.3882,
"step": 642
},
{
"epoch": 1.8267045454545454,
"grad_norm": 1.2671162081281926,
"learning_rate": 8.012242081958477e-06,
"loss": 0.3475,
"step": 643
},
{
"epoch": 1.8295454545454546,
"grad_norm": 1.3107781953106952,
"learning_rate": 8.004321116143496e-06,
"loss": 0.3596,
"step": 644
},
{
"epoch": 1.8323863636363638,
"grad_norm": 1.3666505259241546,
"learning_rate": 7.996388332556735e-06,
"loss": 0.4169,
"step": 645
},
{
"epoch": 1.8352272727272727,
"grad_norm": 1.4514440975950802,
"learning_rate": 7.988443762402525e-06,
"loss": 0.3918,
"step": 646
},
{
"epoch": 1.8380681818181817,
"grad_norm": 1.2979859649109158,
"learning_rate": 7.980487436931558e-06,
"loss": 0.4341,
"step": 647
},
{
"epoch": 1.8409090909090908,
"grad_norm": 1.2503565961960492,
"learning_rate": 7.972519387440767e-06,
"loss": 0.4058,
"step": 648
},
{
"epoch": 1.84375,
"grad_norm": 1.2538862467599425,
"learning_rate": 7.964539645273204e-06,
"loss": 0.4106,
"step": 649
},
{
"epoch": 1.8465909090909092,
"grad_norm": 1.2837654267836336,
"learning_rate": 7.956548241817914e-06,
"loss": 0.3882,
"step": 650
},
{
"epoch": 1.8494318181818183,
"grad_norm": 1.283512404811162,
"learning_rate": 7.948545208509811e-06,
"loss": 0.4181,
"step": 651
},
{
"epoch": 1.8522727272727273,
"grad_norm": 1.3386923463737823,
"learning_rate": 7.940530576829562e-06,
"loss": 0.3923,
"step": 652
},
{
"epoch": 1.8551136363636362,
"grad_norm": 1.2341509248969929,
"learning_rate": 7.932504378303452e-06,
"loss": 0.365,
"step": 653
},
{
"epoch": 1.8579545454545454,
"grad_norm": 1.216460582958605,
"learning_rate": 7.924466644503265e-06,
"loss": 0.3495,
"step": 654
},
{
"epoch": 1.8607954545454546,
"grad_norm": 1.3130118175509913,
"learning_rate": 7.916417407046166e-06,
"loss": 0.3891,
"step": 655
},
{
"epoch": 1.8636363636363638,
"grad_norm": 1.3009995160105379,
"learning_rate": 7.908356697594562e-06,
"loss": 0.3555,
"step": 656
},
{
"epoch": 1.8664772727272727,
"grad_norm": 1.2983355016455334,
"learning_rate": 7.900284547855992e-06,
"loss": 0.4183,
"step": 657
},
{
"epoch": 1.8693181818181817,
"grad_norm": 1.3025519454264027,
"learning_rate": 7.892200989582994e-06,
"loss": 0.3832,
"step": 658
},
{
"epoch": 1.8721590909090908,
"grad_norm": 1.4015896773460164,
"learning_rate": 7.884106054572987e-06,
"loss": 0.3852,
"step": 659
},
{
"epoch": 1.875,
"grad_norm": 1.3658709627701167,
"learning_rate": 7.875999774668135e-06,
"loss": 0.4103,
"step": 660
},
{
"epoch": 1.8778409090909092,
"grad_norm": 1.1749326223713263,
"learning_rate": 7.86788218175523e-06,
"loss": 0.4185,
"step": 661
},
{
"epoch": 1.8806818181818183,
"grad_norm": 1.3300104427508925,
"learning_rate": 7.859753307765571e-06,
"loss": 0.4146,
"step": 662
},
{
"epoch": 1.8835227272727273,
"grad_norm": 1.3350244520568906,
"learning_rate": 7.851613184674821e-06,
"loss": 0.3795,
"step": 663
},
{
"epoch": 1.8863636363636362,
"grad_norm": 1.3114376934881007,
"learning_rate": 7.843461844502903e-06,
"loss": 0.3887,
"step": 664
},
{
"epoch": 1.8892045454545454,
"grad_norm": 1.263848611253203,
"learning_rate": 7.835299319313854e-06,
"loss": 0.3627,
"step": 665
},
{
"epoch": 1.8920454545454546,
"grad_norm": 1.3067022373616142,
"learning_rate": 7.827125641215718e-06,
"loss": 0.4148,
"step": 666
},
{
"epoch": 1.8948863636363638,
"grad_norm": 1.3313015879998442,
"learning_rate": 7.818940842360404e-06,
"loss": 0.4221,
"step": 667
},
{
"epoch": 1.8977272727272727,
"grad_norm": 1.286432747246891,
"learning_rate": 7.810744954943564e-06,
"loss": 0.3851,
"step": 668
},
{
"epoch": 1.9005681818181817,
"grad_norm": 1.2339003550511074,
"learning_rate": 7.80253801120447e-06,
"loss": 0.3771,
"step": 669
},
{
"epoch": 1.9034090909090908,
"grad_norm": 1.2667417343121634,
"learning_rate": 7.79432004342589e-06,
"loss": 0.4144,
"step": 670
},
{
"epoch": 1.90625,
"grad_norm": 1.3970266065185077,
"learning_rate": 7.78609108393395e-06,
"loss": 0.3739,
"step": 671
},
{
"epoch": 1.9090909090909092,
"grad_norm": 1.40544283943565,
"learning_rate": 7.777851165098012e-06,
"loss": 0.446,
"step": 672
},
{
"epoch": 1.9119318181818183,
"grad_norm": 1.3511713634953924,
"learning_rate": 7.769600319330553e-06,
"loss": 0.4265,
"step": 673
},
{
"epoch": 1.9147727272727273,
"grad_norm": 1.362587721165509,
"learning_rate": 7.761338579087026e-06,
"loss": 0.4179,
"step": 674
},
{
"epoch": 1.9176136363636362,
"grad_norm": 1.3317227014945128,
"learning_rate": 7.753065976865745e-06,
"loss": 0.444,
"step": 675
},
{
"epoch": 1.9204545454545454,
"grad_norm": 1.353251061657008,
"learning_rate": 7.744782545207745e-06,
"loss": 0.4288,
"step": 676
},
{
"epoch": 1.9232954545454546,
"grad_norm": 1.3310135379731283,
"learning_rate": 7.736488316696663e-06,
"loss": 0.363,
"step": 677
},
{
"epoch": 1.9261363636363638,
"grad_norm": 1.4336597059726608,
"learning_rate": 7.728183323958603e-06,
"loss": 0.3774,
"step": 678
},
{
"epoch": 1.9289772727272727,
"grad_norm": 1.3096730175462812,
"learning_rate": 7.719867599662017e-06,
"loss": 0.3815,
"step": 679
},
{
"epoch": 1.9318181818181817,
"grad_norm": 1.2452300162277348,
"learning_rate": 7.711541176517563e-06,
"loss": 0.3523,
"step": 680
},
{
"epoch": 1.9346590909090908,
"grad_norm": 1.2601600689651646,
"learning_rate": 7.703204087277989e-06,
"loss": 0.4133,
"step": 681
},
{
"epoch": 1.9375,
"grad_norm": 1.3131138015057662,
"learning_rate": 7.694856364737997e-06,
"loss": 0.3528,
"step": 682
},
{
"epoch": 1.9403409090909092,
"grad_norm": 1.3174274202800598,
"learning_rate": 7.686498041734121e-06,
"loss": 0.3687,
"step": 683
},
{
"epoch": 1.9431818181818183,
"grad_norm": 1.382354479301822,
"learning_rate": 7.678129151144582e-06,
"loss": 0.385,
"step": 684
},
{
"epoch": 1.9460227272727273,
"grad_norm": 1.342621819895786,
"learning_rate": 7.669749725889182e-06,
"loss": 0.4192,
"step": 685
},
{
"epoch": 1.9488636363636362,
"grad_norm": 1.2633452874742823,
"learning_rate": 7.661359798929152e-06,
"loss": 0.4359,
"step": 686
},
{
"epoch": 1.9517045454545454,
"grad_norm": 1.4034254507755521,
"learning_rate": 7.65295940326704e-06,
"loss": 0.4036,
"step": 687
},
{
"epoch": 1.9545454545454546,
"grad_norm": 1.2499008102214813,
"learning_rate": 7.644548571946569e-06,
"loss": 0.402,
"step": 688
},
{
"epoch": 1.9573863636363638,
"grad_norm": 1.4019764959578607,
"learning_rate": 7.636127338052513e-06,
"loss": 0.4168,
"step": 689
},
{
"epoch": 1.9602272727272727,
"grad_norm": 1.3111166839375912,
"learning_rate": 7.627695734710565e-06,
"loss": 0.4128,
"step": 690
},
{
"epoch": 1.9630681818181817,
"grad_norm": 1.2665556025054903,
"learning_rate": 7.619253795087209e-06,
"loss": 0.3456,
"step": 691
},
{
"epoch": 1.9659090909090908,
"grad_norm": 1.221754638519089,
"learning_rate": 7.610801552389584e-06,
"loss": 0.4104,
"step": 692
},
{
"epoch": 1.96875,
"grad_norm": 1.1457951321520503,
"learning_rate": 7.602339039865362e-06,
"loss": 0.3633,
"step": 693
},
{
"epoch": 1.9715909090909092,
"grad_norm": 1.344478912554315,
"learning_rate": 7.593866290802608e-06,
"loss": 0.451,
"step": 694
},
{
"epoch": 1.9744318181818183,
"grad_norm": 1.4122647415470586,
"learning_rate": 7.5853833385296545e-06,
"loss": 0.3944,
"step": 695
},
{
"epoch": 1.9772727272727273,
"grad_norm": 1.4648734442255729,
"learning_rate": 7.576890216414973e-06,
"loss": 0.4243,
"step": 696
},
{
"epoch": 1.9801136363636362,
"grad_norm": 1.1880578823116779,
"learning_rate": 7.568386957867033e-06,
"loss": 0.3682,
"step": 697
},
{
"epoch": 1.9829545454545454,
"grad_norm": 1.388741245940409,
"learning_rate": 7.559873596334179e-06,
"loss": 0.3952,
"step": 698
},
{
"epoch": 1.9857954545454546,
"grad_norm": 1.3426340364215053,
"learning_rate": 7.5513501653045e-06,
"loss": 0.3859,
"step": 699
},
{
"epoch": 1.9886363636363638,
"grad_norm": 1.342227819106623,
"learning_rate": 7.542816698305686e-06,
"loss": 0.3573,
"step": 700
},
{
"epoch": 1.9914772727272727,
"grad_norm": 1.2713457352199766,
"learning_rate": 7.534273228904916e-06,
"loss": 0.3714,
"step": 701
},
{
"epoch": 1.9943181818181817,
"grad_norm": 1.2814840874973006,
"learning_rate": 7.525719790708703e-06,
"loss": 0.4265,
"step": 702
},
{
"epoch": 1.9971590909090908,
"grad_norm": 1.224475509495077,
"learning_rate": 7.5171564173627795e-06,
"loss": 0.3623,
"step": 703
},
{
"epoch": 2.0,
"grad_norm": 1.3940157665360537,
"learning_rate": 7.508583142551959e-06,
"loss": 0.3359,
"step": 704
}
],
"logging_steps": 1,
"max_steps": 1760,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 352,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 727389995073536.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}