{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 338,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002962962962962963,
"grad_norm": 82.5,
"learning_rate": 0.0,
"loss": 5.4851,
"step": 1
},
{
"epoch": 0.005925925925925926,
"grad_norm": 81.5,
"learning_rate": 1.0000000000000002e-06,
"loss": 5.5219,
"step": 2
},
{
"epoch": 0.008888888888888889,
"grad_norm": 83.0,
"learning_rate": 2.0000000000000003e-06,
"loss": 5.7155,
"step": 3
},
{
"epoch": 0.011851851851851851,
"grad_norm": 74.0,
"learning_rate": 3e-06,
"loss": 5.5065,
"step": 4
},
{
"epoch": 0.014814814814814815,
"grad_norm": 60.5,
"learning_rate": 4.000000000000001e-06,
"loss": 4.8696,
"step": 5
},
{
"epoch": 0.017777777777777778,
"grad_norm": 48.25,
"learning_rate": 5e-06,
"loss": 4.3835,
"step": 6
},
{
"epoch": 0.02074074074074074,
"grad_norm": 40.5,
"learning_rate": 6e-06,
"loss": 4.1231,
"step": 7
},
{
"epoch": 0.023703703703703703,
"grad_norm": 50.5,
"learning_rate": 7e-06,
"loss": 3.6886,
"step": 8
},
{
"epoch": 0.02666666666666667,
"grad_norm": 34.5,
"learning_rate": 8.000000000000001e-06,
"loss": 3.3401,
"step": 9
},
{
"epoch": 0.02962962962962963,
"grad_norm": 27.125,
"learning_rate": 9e-06,
"loss": 3.043,
"step": 10
},
{
"epoch": 0.03259259259259259,
"grad_norm": 21.5,
"learning_rate": 1e-05,
"loss": 2.7408,
"step": 11
},
{
"epoch": 0.035555555555555556,
"grad_norm": 19.875,
"learning_rate": 1.1000000000000001e-05,
"loss": 2.5235,
"step": 12
},
{
"epoch": 0.03851851851851852,
"grad_norm": 19.0,
"learning_rate": 1.2e-05,
"loss": 2.4532,
"step": 13
},
{
"epoch": 0.04148148148148148,
"grad_norm": 19.625,
"learning_rate": 1.3000000000000001e-05,
"loss": 2.3238,
"step": 14
},
{
"epoch": 0.044444444444444446,
"grad_norm": 20.125,
"learning_rate": 1.4e-05,
"loss": 2.2851,
"step": 15
},
{
"epoch": 0.047407407407407405,
"grad_norm": 17.5,
"learning_rate": 1.5000000000000002e-05,
"loss": 2.2676,
"step": 16
},
{
"epoch": 0.05037037037037037,
"grad_norm": 16.0,
"learning_rate": 1.6000000000000003e-05,
"loss": 2.1502,
"step": 17
},
{
"epoch": 0.05333333333333334,
"grad_norm": 14.4375,
"learning_rate": 1.7e-05,
"loss": 2.1293,
"step": 18
},
{
"epoch": 0.056296296296296296,
"grad_norm": 15.5625,
"learning_rate": 1.8e-05,
"loss": 2.0649,
"step": 19
},
{
"epoch": 0.05925925925925926,
"grad_norm": 19.0,
"learning_rate": 1.9e-05,
"loss": 1.961,
"step": 20
},
{
"epoch": 0.06222222222222222,
"grad_norm": 14.375,
"learning_rate": 2e-05,
"loss": 1.9368,
"step": 21
},
{
"epoch": 0.06518518518518518,
"grad_norm": 15.75,
"learning_rate": 1.999951200881446e-05,
"loss": 1.9511,
"step": 22
},
{
"epoch": 0.06814814814814815,
"grad_norm": 13.5625,
"learning_rate": 1.999804808288491e-05,
"loss": 1.8874,
"step": 23
},
{
"epoch": 0.07111111111111111,
"grad_norm": 13.4375,
"learning_rate": 1.9995608365087945e-05,
"loss": 1.7678,
"step": 24
},
{
"epoch": 0.07407407407407407,
"grad_norm": 14.25,
"learning_rate": 1.999219309353572e-05,
"loss": 1.8061,
"step": 25
},
{
"epoch": 0.07703703703703704,
"grad_norm": 13.9375,
"learning_rate": 1.9987802601552717e-05,
"loss": 1.778,
"step": 26
},
{
"epoch": 0.08,
"grad_norm": 11.3125,
"learning_rate": 1.9982437317643218e-05,
"loss": 1.5772,
"step": 27
},
{
"epoch": 0.08296296296296296,
"grad_norm": 13.3125,
"learning_rate": 1.9976097765449463e-05,
"loss": 1.7557,
"step": 28
},
{
"epoch": 0.08592592592592592,
"grad_norm": 12.9375,
"learning_rate": 1.9968784563700586e-05,
"loss": 1.6816,
"step": 29
},
{
"epoch": 0.08888888888888889,
"grad_norm": 12.8125,
"learning_rate": 1.996049842615217e-05,
"loss": 1.742,
"step": 30
},
{
"epoch": 0.09185185185185185,
"grad_norm": 12.125,
"learning_rate": 1.9951240161516643e-05,
"loss": 1.7055,
"step": 31
},
{
"epoch": 0.09481481481481481,
"grad_norm": 11.5625,
"learning_rate": 1.9941010673384307e-05,
"loss": 1.6907,
"step": 32
},
{
"epoch": 0.09777777777777778,
"grad_norm": 11.4375,
"learning_rate": 1.992981096013517e-05,
"loss": 1.7213,
"step": 33
},
{
"epoch": 0.10074074074074074,
"grad_norm": 12.125,
"learning_rate": 1.9917642114841505e-05,
"loss": 1.8787,
"step": 34
},
{
"epoch": 0.1037037037037037,
"grad_norm": 11.125,
"learning_rate": 1.990450532516116e-05,
"loss": 1.6781,
"step": 35
},
{
"epoch": 0.10666666666666667,
"grad_norm": 12.8125,
"learning_rate": 1.9890401873221642e-05,
"loss": 1.5917,
"step": 36
},
{
"epoch": 0.10962962962962963,
"grad_norm": 12.0625,
"learning_rate": 1.9875333135495e-05,
"loss": 1.7399,
"step": 37
},
{
"epoch": 0.11259259259259259,
"grad_norm": 11.6875,
"learning_rate": 1.985930058266348e-05,
"loss": 1.8376,
"step": 38
},
{
"epoch": 0.11555555555555555,
"grad_norm": 10.9375,
"learning_rate": 1.984230577947597e-05,
"loss": 1.7302,
"step": 39
},
{
"epoch": 0.11851851851851852,
"grad_norm": 10.125,
"learning_rate": 1.9824350384595295e-05,
"loss": 1.6194,
"step": 40
},
{
"epoch": 0.12148148148148148,
"grad_norm": 10.375,
"learning_rate": 1.9805436150436352e-05,
"loss": 1.6648,
"step": 41
},
{
"epoch": 0.12444444444444444,
"grad_norm": 12.3125,
"learning_rate": 1.9785564922995042e-05,
"loss": 1.7417,
"step": 42
},
{
"epoch": 0.1274074074074074,
"grad_norm": 11.625,
"learning_rate": 1.9764738641668137e-05,
"loss": 1.6403,
"step": 43
},
{
"epoch": 0.13037037037037036,
"grad_norm": 10.625,
"learning_rate": 1.9742959339063977e-05,
"loss": 1.6204,
"step": 44
},
{
"epoch": 0.13333333333333333,
"grad_norm": 11.75,
"learning_rate": 1.972022914080411e-05,
"loss": 1.7575,
"step": 45
},
{
"epoch": 0.1362962962962963,
"grad_norm": 12.625,
"learning_rate": 1.9696550265315805e-05,
"loss": 1.594,
"step": 46
},
{
"epoch": 0.13925925925925925,
"grad_norm": 11.3125,
"learning_rate": 1.9671925023615572e-05,
"loss": 1.6259,
"step": 47
},
{
"epoch": 0.14222222222222222,
"grad_norm": 11.9375,
"learning_rate": 1.964635581908359e-05,
"loss": 1.7574,
"step": 48
},
{
"epoch": 0.1451851851851852,
"grad_norm": 10.75,
"learning_rate": 1.961984514722914e-05,
"loss": 1.7021,
"step": 49
},
{
"epoch": 0.14814814814814814,
"grad_norm": 10.625,
"learning_rate": 1.9592395595447064e-05,
"loss": 1.5892,
"step": 50
},
{
"epoch": 0.1511111111111111,
"grad_norm": 10.625,
"learning_rate": 1.9564009842765225e-05,
"loss": 1.6846,
"step": 51
},
{
"epoch": 0.15407407407407409,
"grad_norm": 10.375,
"learning_rate": 1.9534690659583045e-05,
"loss": 1.6761,
"step": 52
},
{
"epoch": 0.15703703703703703,
"grad_norm": 10.625,
"learning_rate": 1.9504440907401113e-05,
"loss": 1.4986,
"step": 53
},
{
"epoch": 0.16,
"grad_norm": 11.4375,
"learning_rate": 1.9473263538541916e-05,
"loss": 1.6183,
"step": 54
},
{
"epoch": 0.16296296296296298,
"grad_norm": 12.3125,
"learning_rate": 1.944116159586169e-05,
"loss": 1.7405,
"step": 55
},
{
"epoch": 0.16592592592592592,
"grad_norm": 11.75,
"learning_rate": 1.9408138212453456e-05,
"loss": 1.608,
"step": 56
},
{
"epoch": 0.1688888888888889,
"grad_norm": 10.6875,
"learning_rate": 1.9374196611341212e-05,
"loss": 1.6267,
"step": 57
},
{
"epoch": 0.17185185185185184,
"grad_norm": 12.3125,
"learning_rate": 1.933934010516539e-05,
"loss": 1.7335,
"step": 58
},
{
"epoch": 0.1748148148148148,
"grad_norm": 11.125,
"learning_rate": 1.9303572095859545e-05,
"loss": 1.7112,
"step": 59
},
{
"epoch": 0.17777777777777778,
"grad_norm": 11.75,
"learning_rate": 1.9266896074318335e-05,
"loss": 1.8106,
"step": 60
},
{
"epoch": 0.18074074074074073,
"grad_norm": 10.875,
"learning_rate": 1.9229315620056805e-05,
"loss": 1.5596,
"step": 61
},
{
"epoch": 0.1837037037037037,
"grad_norm": 14.0625,
"learning_rate": 1.9190834400861035e-05,
"loss": 1.5379,
"step": 62
},
{
"epoch": 0.18666666666666668,
"grad_norm": 10.875,
"learning_rate": 1.9151456172430186e-05,
"loss": 1.5921,
"step": 63
},
{
"epoch": 0.18962962962962962,
"grad_norm": 10.75,
"learning_rate": 1.9111184778009934e-05,
"loss": 1.6252,
"step": 64
},
{
"epoch": 0.1925925925925926,
"grad_norm": 11.8125,
"learning_rate": 1.9070024148017375e-05,
"loss": 1.7589,
"step": 65
},
{
"epoch": 0.19555555555555557,
"grad_norm": 10.4375,
"learning_rate": 1.9027978299657436e-05,
"loss": 1.4877,
"step": 66
},
{
"epoch": 0.1985185185185185,
"grad_norm": 10.0625,
"learning_rate": 1.89850513365308e-05,
"loss": 1.6578,
"step": 67
},
{
"epoch": 0.20148148148148148,
"grad_norm": 11.5,
"learning_rate": 1.8941247448233386e-05,
"loss": 1.5563,
"step": 68
},
{
"epoch": 0.20444444444444446,
"grad_norm": 12.25,
"learning_rate": 1.8896570909947477e-05,
"loss": 1.6964,
"step": 69
},
{
"epoch": 0.2074074074074074,
"grad_norm": 10.5625,
"learning_rate": 1.885102608202444e-05,
"loss": 1.5913,
"step": 70
},
{
"epoch": 0.21037037037037037,
"grad_norm": 10.625,
"learning_rate": 1.88046174095592e-05,
"loss": 1.5556,
"step": 71
},
{
"epoch": 0.21333333333333335,
"grad_norm": 10.0625,
"learning_rate": 1.875734942195637e-05,
"loss": 1.5116,
"step": 72
},
{
"epoch": 0.2162962962962963,
"grad_norm": 9.6875,
"learning_rate": 1.8709226732488216e-05,
"loss": 1.5118,
"step": 73
},
{
"epoch": 0.21925925925925926,
"grad_norm": 10.4375,
"learning_rate": 1.866025403784439e-05,
"loss": 1.6544,
"step": 74
},
{
"epoch": 0.2222222222222222,
"grad_norm": 10.875,
"learning_rate": 1.8610436117673557e-05,
"loss": 1.5481,
"step": 75
},
{
"epoch": 0.22518518518518518,
"grad_norm": 10.75,
"learning_rate": 1.8559777834116906e-05,
"loss": 1.5817,
"step": 76
},
{
"epoch": 0.22814814814814816,
"grad_norm": 10.5,
"learning_rate": 1.8508284131333604e-05,
"loss": 1.5016,
"step": 77
},
{
"epoch": 0.2311111111111111,
"grad_norm": 10.8125,
"learning_rate": 1.845596003501826e-05,
"loss": 1.5924,
"step": 78
},
{
"epoch": 0.23407407407407407,
"grad_norm": 10.25,
"learning_rate": 1.8402810651910444e-05,
"loss": 1.63,
"step": 79
},
{
"epoch": 0.23703703703703705,
"grad_norm": 10.5625,
"learning_rate": 1.8348841169296247e-05,
"loss": 1.5974,
"step": 80
},
{
"epoch": 0.24,
"grad_norm": 9.875,
"learning_rate": 1.829405685450202e-05,
"loss": 1.5722,
"step": 81
},
{
"epoch": 0.24296296296296296,
"grad_norm": 10.5,
"learning_rate": 1.823846305438032e-05,
"loss": 1.4561,
"step": 82
},
{
"epoch": 0.24592592592592594,
"grad_norm": 9.5625,
"learning_rate": 1.8182065194788024e-05,
"loss": 1.4567,
"step": 83
},
{
"epoch": 0.24888888888888888,
"grad_norm": 9.875,
"learning_rate": 1.8124868780056814e-05,
"loss": 1.613,
"step": 84
},
{
"epoch": 0.2518518518518518,
"grad_norm": 9.75,
"learning_rate": 1.8066879392455932e-05,
"loss": 1.609,
"step": 85
},
{
"epoch": 0.2548148148148148,
"grad_norm": 10.1875,
"learning_rate": 1.800810269164738e-05,
"loss": 1.6807,
"step": 86
},
{
"epoch": 0.2577777777777778,
"grad_norm": 10.3125,
"learning_rate": 1.7948544414133534e-05,
"loss": 1.6024,
"step": 87
},
{
"epoch": 0.2607407407407407,
"grad_norm": 9.9375,
"learning_rate": 1.7888210372697292e-05,
"loss": 1.4733,
"step": 88
},
{
"epoch": 0.2637037037037037,
"grad_norm": 10.4375,
"learning_rate": 1.782710645583473e-05,
"loss": 1.5994,
"step": 89
},
{
"epoch": 0.26666666666666666,
"grad_norm": 13.625,
"learning_rate": 1.7765238627180424e-05,
"loss": 1.3751,
"step": 90
},
{
"epoch": 0.2696296296296296,
"grad_norm": 10.875,
"learning_rate": 1.7702612924925377e-05,
"loss": 1.436,
"step": 91
},
{
"epoch": 0.2725925925925926,
"grad_norm": 9.625,
"learning_rate": 1.7639235461227727e-05,
"loss": 1.5887,
"step": 92
},
{
"epoch": 0.27555555555555555,
"grad_norm": 9.6875,
"learning_rate": 1.7575112421616203e-05,
"loss": 1.5977,
"step": 93
},
{
"epoch": 0.2785185185185185,
"grad_norm": 10.25,
"learning_rate": 1.751025006438643e-05,
"loss": 1.518,
"step": 94
},
{
"epoch": 0.2814814814814815,
"grad_norm": 10.8125,
"learning_rate": 1.7444654719990128e-05,
"loss": 1.6543,
"step": 95
},
{
"epoch": 0.28444444444444444,
"grad_norm": 9.9375,
"learning_rate": 1.7378332790417275e-05,
"loss": 1.5368,
"step": 96
},
{
"epoch": 0.2874074074074074,
"grad_norm": 10.0,
"learning_rate": 1.7311290748571273e-05,
"loss": 1.5349,
"step": 97
},
{
"epoch": 0.2903703703703704,
"grad_norm": 9.875,
"learning_rate": 1.7243535137637227e-05,
"loss": 1.6126,
"step": 98
},
{
"epoch": 0.29333333333333333,
"grad_norm": 9.8125,
"learning_rate": 1.717507257044331e-05,
"loss": 1.5037,
"step": 99
},
{
"epoch": 0.2962962962962963,
"grad_norm": 9.3125,
"learning_rate": 1.7105909728815398e-05,
"loss": 1.5038,
"step": 100
},
{
"epoch": 0.2992592592592593,
"grad_norm": 9.0,
"learning_rate": 1.7036053362924896e-05,
"loss": 1.3771,
"step": 101
},
{
"epoch": 0.3022222222222222,
"grad_norm": 9.375,
"learning_rate": 1.6965510290629973e-05,
"loss": 1.5432,
"step": 102
},
{
"epoch": 0.30518518518518517,
"grad_norm": 9.375,
"learning_rate": 1.689428739681012e-05,
"loss": 1.4479,
"step": 103
},
{
"epoch": 0.30814814814814817,
"grad_norm": 11.1875,
"learning_rate": 1.682239163269422e-05,
"loss": 1.5956,
"step": 104
},
{
"epoch": 0.3111111111111111,
"grad_norm": 10.5,
"learning_rate": 1.6749830015182106e-05,
"loss": 1.4063,
"step": 105
},
{
"epoch": 0.31407407407407406,
"grad_norm": 10.0625,
"learning_rate": 1.667660962615973e-05,
"loss": 1.6329,
"step": 106
},
{
"epoch": 0.31703703703703706,
"grad_norm": 11.3125,
"learning_rate": 1.6602737611807975e-05,
"loss": 1.5915,
"step": 107
},
{
"epoch": 0.32,
"grad_norm": 9.375,
"learning_rate": 1.6528221181905217e-05,
"loss": 1.3679,
"step": 108
},
{
"epoch": 0.32296296296296295,
"grad_norm": 9.1875,
"learning_rate": 1.6453067609123656e-05,
"loss": 1.4718,
"step": 109
},
{
"epoch": 0.32592592592592595,
"grad_norm": 9.6875,
"learning_rate": 1.6377284228319496e-05,
"loss": 1.4929,
"step": 110
},
{
"epoch": 0.3288888888888889,
"grad_norm": 9.9375,
"learning_rate": 1.6300878435817115e-05,
"loss": 1.4887,
"step": 111
},
{
"epoch": 0.33185185185185184,
"grad_norm": 10.1875,
"learning_rate": 1.622385768868716e-05,
"loss": 1.5634,
"step": 112
},
{
"epoch": 0.3348148148148148,
"grad_norm": 9.75,
"learning_rate": 1.6146229504018777e-05,
"loss": 1.4839,
"step": 113
},
{
"epoch": 0.3377777777777778,
"grad_norm": 10.9375,
"learning_rate": 1.6068001458185934e-05,
"loss": 1.4955,
"step": 114
},
{
"epoch": 0.34074074074074073,
"grad_norm": 9.5625,
"learning_rate": 1.5989181186108003e-05,
"loss": 1.4464,
"step": 115
},
{
"epoch": 0.3437037037037037,
"grad_norm": 9.8125,
"learning_rate": 1.5909776380504583e-05,
"loss": 1.4568,
"step": 116
},
{
"epoch": 0.3466666666666667,
"grad_norm": 10.4375,
"learning_rate": 1.5829794791144723e-05,
"loss": 1.5491,
"step": 117
},
{
"epoch": 0.3496296296296296,
"grad_norm": 10.9375,
"learning_rate": 1.5749244224090537e-05,
"loss": 1.4774,
"step": 118
},
{
"epoch": 0.35259259259259257,
"grad_norm": 10.875,
"learning_rate": 1.566813254093538e-05,
"loss": 1.5642,
"step": 119
},
{
"epoch": 0.35555555555555557,
"grad_norm": 10.4375,
"learning_rate": 1.5586467658036526e-05,
"loss": 1.5229,
"step": 120
},
{
"epoch": 0.3585185185185185,
"grad_norm": 10.1875,
"learning_rate": 1.5504257545742585e-05,
"loss": 1.5067,
"step": 121
},
{
"epoch": 0.36148148148148146,
"grad_norm": 9.6875,
"learning_rate": 1.5421510227615587e-05,
"loss": 1.4074,
"step": 122
},
{
"epoch": 0.36444444444444446,
"grad_norm": 11.3125,
"learning_rate": 1.533823377964791e-05,
"loss": 1.4726,
"step": 123
},
{
"epoch": 0.3674074074074074,
"grad_norm": 12.75,
"learning_rate": 1.5254436329474062e-05,
"loss": 1.351,
"step": 124
},
{
"epoch": 0.37037037037037035,
"grad_norm": 10.6875,
"learning_rate": 1.517012605557746e-05,
"loss": 1.5042,
"step": 125
},
{
"epoch": 0.37333333333333335,
"grad_norm": 9.8125,
"learning_rate": 1.5085311186492206e-05,
"loss": 1.5669,
"step": 126
},
{
"epoch": 0.3762962962962963,
"grad_norm": 9.75,
"learning_rate": 1.5000000000000002e-05,
"loss": 1.4575,
"step": 127
},
{
"epoch": 0.37925925925925924,
"grad_norm": 9.9375,
"learning_rate": 1.4914200822322255e-05,
"loss": 1.5091,
"step": 128
},
{
"epoch": 0.38222222222222224,
"grad_norm": 12.5,
"learning_rate": 1.482792202730745e-05,
"loss": 1.6976,
"step": 129
},
{
"epoch": 0.3851851851851852,
"grad_norm": 11.625,
"learning_rate": 1.4741172035613884e-05,
"loss": 1.5555,
"step": 130
},
{
"epoch": 0.38814814814814813,
"grad_norm": 10.3125,
"learning_rate": 1.4653959313887813e-05,
"loss": 1.7106,
"step": 131
},
{
"epoch": 0.39111111111111113,
"grad_norm": 10.3125,
"learning_rate": 1.4566292373937133e-05,
"loss": 1.4944,
"step": 132
},
{
"epoch": 0.3940740740740741,
"grad_norm": 9.3125,
"learning_rate": 1.4478179771900634e-05,
"loss": 1.4851,
"step": 133
},
{
"epoch": 0.397037037037037,
"grad_norm": 9.0,
"learning_rate": 1.4389630107412942e-05,
"loss": 1.5191,
"step": 134
},
{
"epoch": 0.4,
"grad_norm": 10.625,
"learning_rate": 1.4300652022765207e-05,
"loss": 1.3619,
"step": 135
},
{
"epoch": 0.40296296296296297,
"grad_norm": 10.4375,
"learning_rate": 1.4211254202061632e-05,
"loss": 1.457,
"step": 136
},
{
"epoch": 0.4059259259259259,
"grad_norm": 9.3125,
"learning_rate": 1.4121445370371922e-05,
"loss": 1.4453,
"step": 137
},
{
"epoch": 0.4088888888888889,
"grad_norm": 9.375,
"learning_rate": 1.4031234292879726e-05,
"loss": 1.4961,
"step": 138
},
{
"epoch": 0.41185185185185186,
"grad_norm": 9.625,
"learning_rate": 1.394062977402717e-05,
"loss": 1.4724,
"step": 139
},
{
"epoch": 0.4148148148148148,
"grad_norm": 10.3125,
"learning_rate": 1.3849640656655572e-05,
"loss": 1.4337,
"step": 140
},
{
"epoch": 0.4177777777777778,
"grad_norm": 11.1875,
"learning_rate": 1.3758275821142382e-05,
"loss": 1.4608,
"step": 141
},
{
"epoch": 0.42074074074074075,
"grad_norm": 10.0,
"learning_rate": 1.3666544184534483e-05,
"loss": 1.4542,
"step": 142
},
{
"epoch": 0.4237037037037037,
"grad_norm": 10.6875,
"learning_rate": 1.3574454699677893e-05,
"loss": 1.5512,
"step": 143
},
{
"epoch": 0.4266666666666667,
"grad_norm": 10.3125,
"learning_rate": 1.348201635434399e-05,
"loss": 1.56,
"step": 144
},
{
"epoch": 0.42962962962962964,
"grad_norm": 9.625,
"learning_rate": 1.3389238170352318e-05,
"loss": 1.3596,
"step": 145
},
{
"epoch": 0.4325925925925926,
"grad_norm": 9.5,
"learning_rate": 1.329612920269008e-05,
"loss": 1.4438,
"step": 146
},
{
"epoch": 0.43555555555555553,
"grad_norm": 10.1875,
"learning_rate": 1.3202698538628376e-05,
"loss": 1.4873,
"step": 147
},
{
"epoch": 0.43851851851851853,
"grad_norm": 9.8125,
"learning_rate": 1.3108955296835313e-05,
"loss": 1.4859,
"step": 148
},
{
"epoch": 0.4414814814814815,
"grad_norm": 9.75,
"learning_rate": 1.3014908626486032e-05,
"loss": 1.4505,
"step": 149
},
{
"epoch": 0.4444444444444444,
"grad_norm": 9.9375,
"learning_rate": 1.292056770636976e-05,
"loss": 1.3619,
"step": 150
},
{
"epoch": 0.4474074074074074,
"grad_norm": 10.25,
"learning_rate": 1.282594174399399e-05,
"loss": 1.4956,
"step": 151
},
{
"epoch": 0.45037037037037037,
"grad_norm": 9.6875,
"learning_rate": 1.2731039974685833e-05,
"loss": 1.4486,
"step": 152
},
{
"epoch": 0.4533333333333333,
"grad_norm": 9.5625,
"learning_rate": 1.2635871660690677e-05,
"loss": 1.377,
"step": 153
},
{
"epoch": 0.4562962962962963,
"grad_norm": 9.4375,
"learning_rate": 1.2540446090268193e-05,
"loss": 1.4245,
"step": 154
},
{
"epoch": 0.45925925925925926,
"grad_norm": 9.375,
"learning_rate": 1.2444772576785828e-05,
"loss": 1.407,
"step": 155
},
{
"epoch": 0.4622222222222222,
"grad_norm": 9.5625,
"learning_rate": 1.234886045780984e-05,
"loss": 1.4341,
"step": 156
},
{
"epoch": 0.4651851851851852,
"grad_norm": 9.3125,
"learning_rate": 1.225271909419395e-05,
"loss": 1.5296,
"step": 157
},
{
"epoch": 0.46814814814814815,
"grad_norm": 9.0625,
"learning_rate": 1.2156357869165771e-05,
"loss": 1.4201,
"step": 158
},
{
"epoch": 0.4711111111111111,
"grad_norm": 9.0625,
"learning_rate": 1.2059786187410984e-05,
"loss": 1.458,
"step": 159
},
{
"epoch": 0.4740740740740741,
"grad_norm": 9.8125,
"learning_rate": 1.1963013474155487e-05,
"loss": 1.5306,
"step": 160
},
{
"epoch": 0.47703703703703704,
"grad_norm": 9.4375,
"learning_rate": 1.186604917424549e-05,
"loss": 1.437,
"step": 161
},
{
"epoch": 0.48,
"grad_norm": 9.5,
"learning_rate": 1.176890275122573e-05,
"loss": 1.4678,
"step": 162
},
{
"epoch": 0.482962962962963,
"grad_norm": 9.0625,
"learning_rate": 1.1671583686415833e-05,
"loss": 1.2917,
"step": 163
},
{
"epoch": 0.48592592592592593,
"grad_norm": 9.25,
"learning_rate": 1.1574101477984966e-05,
"loss": 1.4942,
"step": 164
},
{
"epoch": 0.4888888888888889,
"grad_norm": 9.5625,
"learning_rate": 1.1476465640024814e-05,
"loss": 1.458,
"step": 165
},
{
"epoch": 0.4918518518518519,
"grad_norm": 10.0625,
"learning_rate": 1.1378685701621047e-05,
"loss": 1.4213,
"step": 166
},
{
"epoch": 0.4948148148148148,
"grad_norm": 8.625,
"learning_rate": 1.1280771205923269e-05,
"loss": 1.2903,
"step": 167
},
{
"epoch": 0.49777777777777776,
"grad_norm": 9.125,
"learning_rate": 1.1182731709213658e-05,
"loss": 1.3953,
"step": 168
},
{
"epoch": 0.5007407407407407,
"grad_norm": 9.5,
"learning_rate": 1.1084576779974257e-05,
"loss": 1.4805,
"step": 169
},
{
"epoch": 0.5037037037037037,
"grad_norm": 9.75,
"learning_rate": 1.0986315997953118e-05,
"loss": 1.4822,
"step": 170
},
{
"epoch": 0.5066666666666667,
"grad_norm": 9.75,
"learning_rate": 1.0887958953229349e-05,
"loss": 1.4369,
"step": 171
},
{
"epoch": 0.5096296296296297,
"grad_norm": 9.375,
"learning_rate": 1.078951524527712e-05,
"loss": 1.3851,
"step": 172
},
{
"epoch": 0.5125925925925926,
"grad_norm": 10.375,
"learning_rate": 1.069099448202878e-05,
"loss": 1.6454,
"step": 173
},
{
"epoch": 0.5155555555555555,
"grad_norm": 9.1875,
"learning_rate": 1.0592406278937143e-05,
"loss": 1.4134,
"step": 174
},
{
"epoch": 0.5185185185185185,
"grad_norm": 9.625,
"learning_rate": 1.049376025803703e-05,
"loss": 1.3613,
"step": 175
},
{
"epoch": 0.5214814814814814,
"grad_norm": 9.5625,
"learning_rate": 1.039506604700618e-05,
"loss": 1.3413,
"step": 176
},
{
"epoch": 0.5244444444444445,
"grad_norm": 8.875,
"learning_rate": 1.0296333278225599e-05,
"loss": 1.3768,
"step": 177
},
{
"epoch": 0.5274074074074074,
"grad_norm": 9.875,
"learning_rate": 1.0197571587839466e-05,
"loss": 1.4624,
"step": 178
},
{
"epoch": 0.5303703703703704,
"grad_norm": 9.5,
"learning_rate": 1.0098790614814658e-05,
"loss": 1.3982,
"step": 179
},
{
"epoch": 0.5333333333333333,
"grad_norm": 9.0,
"learning_rate": 1e-05,
"loss": 1.4345,
"step": 180
},
{
"epoch": 0.5362962962962963,
"grad_norm": 9.4375,
"learning_rate": 9.901209385185345e-06,
"loss": 1.4967,
"step": 181
},
{
"epoch": 0.5392592592592592,
"grad_norm": 10.0625,
"learning_rate": 9.802428412160538e-06,
"loss": 1.5533,
"step": 182
},
{
"epoch": 0.5422222222222223,
"grad_norm": 10.375,
"learning_rate": 9.703666721774403e-06,
"loss": 1.4622,
"step": 183
},
{
"epoch": 0.5451851851851852,
"grad_norm": 9.4375,
"learning_rate": 9.604933952993822e-06,
"loss": 1.4235,
"step": 184
},
{
"epoch": 0.5481481481481482,
"grad_norm": 9.5,
"learning_rate": 9.506239741962971e-06,
"loss": 1.4563,
"step": 185
},
{
"epoch": 0.5511111111111111,
"grad_norm": 9.6875,
"learning_rate": 9.407593721062858e-06,
"loss": 1.5149,
"step": 186
},
{
"epoch": 0.554074074074074,
"grad_norm": 9.25,
"learning_rate": 9.309005517971222e-06,
"loss": 1.4514,
"step": 187
},
{
"epoch": 0.557037037037037,
"grad_norm": 9.8125,
"learning_rate": 9.210484754722882e-06,
"loss": 1.4964,
"step": 188
},
{
"epoch": 0.56,
"grad_norm": 9.375,
"learning_rate": 9.112041046770653e-06,
"loss": 1.3697,
"step": 189
},
{
"epoch": 0.562962962962963,
"grad_norm": 10.25,
"learning_rate": 9.013684002046883e-06,
"loss": 1.2907,
"step": 190
},
{
"epoch": 0.5659259259259259,
"grad_norm": 9.625,
"learning_rate": 8.915423220025747e-06,
"loss": 1.3109,
"step": 191
},
{
"epoch": 0.5688888888888889,
"grad_norm": 9.25,
"learning_rate": 8.817268290786343e-06,
"loss": 1.4432,
"step": 192
},
{
"epoch": 0.5718518518518518,
"grad_norm": 9.6875,
"learning_rate": 8.719228794076733e-06,
"loss": 1.4068,
"step": 193
},
{
"epoch": 0.5748148148148148,
"grad_norm": 10.75,
"learning_rate": 8.621314298378958e-06,
"loss": 1.4999,
"step": 194
},
{
"epoch": 0.5777777777777777,
"grad_norm": 9.875,
"learning_rate": 8.52353435997519e-06,
"loss": 1.5483,
"step": 195
},
{
"epoch": 0.5807407407407408,
"grad_norm": 9.25,
"learning_rate": 8.425898522015038e-06,
"loss": 1.3818,
"step": 196
},
{
"epoch": 0.5837037037037037,
"grad_norm": 9.8125,
"learning_rate": 8.328416313584169e-06,
"loss": 1.4961,
"step": 197
},
{
"epoch": 0.5866666666666667,
"grad_norm": 9.8125,
"learning_rate": 8.231097248774273e-06,
"loss": 1.4898,
"step": 198
},
{
"epoch": 0.5896296296296296,
"grad_norm": 9.4375,
"learning_rate": 8.133950825754511e-06,
"loss": 1.3789,
"step": 199
},
{
"epoch": 0.5925925925925926,
"grad_norm": 9.5625,
"learning_rate": 8.036986525844516e-06,
"loss": 1.3425,
"step": 200
},
{
"epoch": 0.5955555555555555,
"grad_norm": 10.4375,
"learning_rate": 7.940213812589018e-06,
"loss": 1.6482,
"step": 201
},
{
"epoch": 0.5985185185185186,
"grad_norm": 9.875,
"learning_rate": 7.843642130834232e-06,
"loss": 1.4535,
"step": 202
},
{
"epoch": 0.6014814814814815,
"grad_norm": 9.5625,
"learning_rate": 7.747280905806051e-06,
"loss": 1.3676,
"step": 203
},
{
"epoch": 0.6044444444444445,
"grad_norm": 9.6875,
"learning_rate": 7.651139542190164e-06,
"loss": 1.498,
"step": 204
},
{
"epoch": 0.6074074074074074,
"grad_norm": 9.75,
"learning_rate": 7.555227423214174e-06,
"loss": 1.4675,
"step": 205
},
{
"epoch": 0.6103703703703703,
"grad_norm": 9.75,
"learning_rate": 7.45955390973181e-06,
"loss": 1.5049,
"step": 206
},
{
"epoch": 0.6133333333333333,
"grad_norm": 9.125,
"learning_rate": 7.364128339309326e-06,
"loss": 1.4279,
"step": 207
},
{
"epoch": 0.6162962962962963,
"grad_norm": 9.5625,
"learning_rate": 7.268960025314169e-06,
"loss": 1.504,
"step": 208
},
{
"epoch": 0.6192592592592593,
"grad_norm": 9.0625,
"learning_rate": 7.174058256006012e-06,
"loss": 1.4354,
"step": 209
},
{
"epoch": 0.6222222222222222,
"grad_norm": 9.1875,
"learning_rate": 7.079432293630244e-06,
"loss": 1.3362,
"step": 210
},
{
"epoch": 0.6251851851851852,
"grad_norm": 9.9375,
"learning_rate": 6.985091373513972e-06,
"loss": 1.3616,
"step": 211
},
{
"epoch": 0.6281481481481481,
"grad_norm": 9.4375,
"learning_rate": 6.8910447031646884e-06,
"loss": 1.4284,
"step": 212
},
{
"epoch": 0.6311111111111111,
"grad_norm": 9.0,
"learning_rate": 6.797301461371626e-06,
"loss": 1.2881,
"step": 213
},
{
"epoch": 0.6340740740740741,
"grad_norm": 10.0625,
"learning_rate": 6.703870797309922e-06,
"loss": 1.4838,
"step": 214
},
{
"epoch": 0.6370370370370371,
"grad_norm": 10.625,
"learning_rate": 6.610761829647685e-06,
"loss": 1.5364,
"step": 215
},
{
"epoch": 0.64,
"grad_norm": 9.625,
"learning_rate": 6.517983645656014e-06,
"loss": 1.4076,
"step": 216
},
{
"epoch": 0.642962962962963,
"grad_norm": 9.8125,
"learning_rate": 6.4255453003221115e-06,
"loss": 1.3774,
"step": 217
},
{
"epoch": 0.6459259259259259,
"grad_norm": 9.4375,
"learning_rate": 6.33345581546552e-06,
"loss": 1.4266,
"step": 218
},
{
"epoch": 0.6488888888888888,
"grad_norm": 8.875,
"learning_rate": 6.241724178857621e-06,
"loss": 1.3279,
"step": 219
},
{
"epoch": 0.6518518518518519,
"grad_norm": 10.5625,
"learning_rate": 6.1503593433444316e-06,
"loss": 1.4278,
"step": 220
},
{
"epoch": 0.6548148148148148,
"grad_norm": 9.6875,
"learning_rate": 6.059370225972834e-06,
"loss": 1.454,
"step": 221
},
{
"epoch": 0.6577777777777778,
"grad_norm": 9.5,
"learning_rate": 5.96876570712028e-06,
"loss": 1.3883,
"step": 222
},
{
"epoch": 0.6607407407407407,
"grad_norm": 9.5,
"learning_rate": 5.878554629628081e-06,
"loss": 1.3739,
"step": 223
},
{
"epoch": 0.6637037037037037,
"grad_norm": 9.3125,
"learning_rate": 5.788745797938372e-06,
"loss": 1.381,
"step": 224
},
{
"epoch": 0.6666666666666666,
"grad_norm": 9.375,
"learning_rate": 5.699347977234799e-06,
"loss": 1.3763,
"step": 225
},
{
"epoch": 0.6696296296296296,
"grad_norm": 9.4375,
"learning_rate": 5.610369892587064e-06,
"loss": 1.2782,
"step": 226
},
{
"epoch": 0.6725925925925926,
"grad_norm": 9.1875,
"learning_rate": 5.5218202280993725e-06,
"loss": 1.428,
"step": 227
},
{
"epoch": 0.6755555555555556,
"grad_norm": 9.4375,
"learning_rate": 5.43370762606287e-06,
"loss": 1.4381,
"step": 228
},
{
"epoch": 0.6785185185185185,
"grad_norm": 9.875,
"learning_rate": 5.346040686112189e-06,
"loss": 1.3499,
"step": 229
},
{
"epoch": 0.6814814814814815,
"grad_norm": 9.6875,
"learning_rate": 5.25882796438612e-06,
"loss": 1.3794,
"step": 230
},
{
"epoch": 0.6844444444444444,
"grad_norm": 9.6875,
"learning_rate": 5.172077972692553e-06,
"loss": 1.3564,
"step": 231
},
{
"epoch": 0.6874074074074074,
"grad_norm": 10.0,
"learning_rate": 5.08579917767775e-06,
"loss": 1.4274,
"step": 232
},
{
"epoch": 0.6903703703703704,
"grad_norm": 8.9375,
"learning_rate": 5.000000000000003e-06,
"loss": 1.267,
"step": 233
},
{
"epoch": 0.6933333333333334,
"grad_norm": 9.375,
"learning_rate": 4.914688813507798e-06,
"loss": 1.3242,
"step": 234
},
{
"epoch": 0.6962962962962963,
"grad_norm": 9.25,
"learning_rate": 4.829873944422544e-06,
"loss": 1.3488,
"step": 235
},
{
"epoch": 0.6992592592592592,
"grad_norm": 9.125,
"learning_rate": 4.745563670525942e-06,
"loss": 1.3237,
"step": 236
},
{
"epoch": 0.7022222222222222,
"grad_norm": 9.5625,
"learning_rate": 4.661766220352098e-06,
"loss": 1.3183,
"step": 237
},
{
"epoch": 0.7051851851851851,
"grad_norm": 9.5625,
"learning_rate": 4.578489772384415e-06,
"loss": 1.4587,
"step": 238
},
{
"epoch": 0.7081481481481482,
"grad_norm": 9.5,
"learning_rate": 4.495742454257418e-06,
"loss": 1.2974,
"step": 239
},
{
"epoch": 0.7111111111111111,
"grad_norm": 9.5625,
"learning_rate": 4.413532341963477e-06,
"loss": 1.4561,
"step": 240
},
{
"epoch": 0.7140740740740741,
"grad_norm": 9.625,
"learning_rate": 4.331867459064623e-06,
"loss": 1.4047,
"step": 241
},
{
"epoch": 0.717037037037037,
"grad_norm": 9.3125,
"learning_rate": 4.250755775909465e-06,
"loss": 1.4223,
"step": 242
},
{
"epoch": 0.72,
"grad_norm": 9.3125,
"learning_rate": 4.170205208855281e-06,
"loss": 1.4031,
"step": 243
},
{
"epoch": 0.7229629629629629,
"grad_norm": 9.125,
"learning_rate": 4.090223619495419e-06,
"loss": 1.3283,
"step": 244
},
{
"epoch": 0.725925925925926,
"grad_norm": 9.375,
"learning_rate": 4.010818813892e-06,
"loss": 1.419,
"step": 245
},
{
"epoch": 0.7288888888888889,
"grad_norm": 9.25,
"learning_rate": 3.931998541814069e-06,
"loss": 1.3429,
"step": 246
},
{
"epoch": 0.7318518518518519,
"grad_norm": 9.3125,
"learning_rate": 3.85377049598123e-06,
"loss": 1.436,
"step": 247
},
{
"epoch": 0.7348148148148148,
"grad_norm": 9.375,
"learning_rate": 3.7761423113128427e-06,
"loss": 1.3918,
"step": 248
},
{
"epoch": 0.7377777777777778,
"grad_norm": 9.6875,
"learning_rate": 3.6991215641828903e-06,
"loss": 1.3523,
"step": 249
},
{
"epoch": 0.7407407407407407,
"grad_norm": 9.4375,
"learning_rate": 3.622715771680508e-06,
"loss": 1.3808,
"step": 250
},
{
"epoch": 0.7437037037037038,
"grad_norm": 9.375,
"learning_rate": 3.5469323908763507e-06,
"loss": 1.4308,
"step": 251
},
{
"epoch": 0.7466666666666667,
"grad_norm": 9.4375,
"learning_rate": 3.4717788180947855e-06,
"loss": 1.3985,
"step": 252
},
{
"epoch": 0.7496296296296296,
"grad_norm": 9.625,
"learning_rate": 3.3972623881920296e-06,
"loss": 1.3034,
"step": 253
},
{
"epoch": 0.7525925925925926,
"grad_norm": 9.3125,
"learning_rate": 3.323390373840276e-06,
"loss": 1.3579,
"step": 254
},
{
"epoch": 0.7555555555555555,
"grad_norm": 9.25,
"learning_rate": 3.250169984817897e-06,
"loss": 1.3301,
"step": 255
},
{
"epoch": 0.7585185185185185,
"grad_norm": 9.125,
"learning_rate": 3.1776083673057834e-06,
"loss": 1.308,
"step": 256
},
{
"epoch": 0.7614814814814815,
"grad_norm": 9.1875,
"learning_rate": 3.1057126031898843e-06,
"loss": 1.3278,
"step": 257
},
{
"epoch": 0.7644444444444445,
"grad_norm": 9.4375,
"learning_rate": 3.0344897093700333e-06,
"loss": 1.3678,
"step": 258
},
{
"epoch": 0.7674074074074074,
"grad_norm": 9.25,
"learning_rate": 2.963946637075107e-06,
"loss": 1.2888,
"step": 259
},
{
"epoch": 0.7703703703703704,
"grad_norm": 9.1875,
"learning_rate": 2.8940902711846052e-06,
"loss": 1.2899,
"step": 260
},
{
"epoch": 0.7733333333333333,
"grad_norm": 8.625,
"learning_rate": 2.8249274295566863e-06,
"loss": 1.2525,
"step": 261
},
{
"epoch": 0.7762962962962963,
"grad_norm": 8.8125,
"learning_rate": 2.756464862362772e-06,
"loss": 1.2665,
"step": 262
},
{
"epoch": 0.7792592592592592,
"grad_norm": 9.4375,
"learning_rate": 2.688709251428725e-06,
"loss": 1.363,
"step": 263
},
{
"epoch": 0.7822222222222223,
"grad_norm": 9.0625,
"learning_rate": 2.6216672095827267e-06,
"loss": 1.3086,
"step": 264
},
{
"epoch": 0.7851851851851852,
"grad_norm": 9.3125,
"learning_rate": 2.555345280009872e-06,
"loss": 1.4259,
"step": 265
},
{
"epoch": 0.7881481481481482,
"grad_norm": 9.125,
"learning_rate": 2.4897499356135712e-06,
"loss": 1.2748,
"step": 266
},
{
"epoch": 0.7911111111111111,
"grad_norm": 9.25,
"learning_rate": 2.424887578383799e-06,
"loss": 1.329,
"step": 267
},
{
"epoch": 0.794074074074074,
"grad_norm": 9.5625,
"learning_rate": 2.3607645387722753e-06,
"loss": 1.4323,
"step": 268
},
{
"epoch": 0.797037037037037,
"grad_norm": 9.5625,
"learning_rate": 2.2973870750746253e-06,
"loss": 1.3081,
"step": 269
},
{
"epoch": 0.8,
"grad_norm": 9.375,
"learning_rate": 2.234761372819577e-06,
"loss": 1.3967,
"step": 270
},
{
"epoch": 0.802962962962963,
"grad_norm": 9.3125,
"learning_rate": 2.1728935441652687e-06,
"loss": 1.345,
"step": 271
},
{
"epoch": 0.8059259259259259,
"grad_norm": 9.0,
"learning_rate": 2.111789627302707e-06,
"loss": 1.3009,
"step": 272
},
{
"epoch": 0.8088888888888889,
"grad_norm": 9.25,
"learning_rate": 2.0514555858664663e-06,
"loss": 1.3242,
"step": 273
},
{
"epoch": 0.8118518518518518,
"grad_norm": 9.3125,
"learning_rate": 1.991897308352624e-06,
"loss": 1.3109,
"step": 274
},
{
"epoch": 0.8148148148148148,
"grad_norm": 8.9375,
"learning_rate": 1.93312060754407e-06,
"loss": 1.3353,
"step": 275
},
{
"epoch": 0.8177777777777778,
"grad_norm": 9.625,
"learning_rate": 1.875131219943187e-06,
"loss": 1.2318,
"step": 276
},
{
"epoch": 0.8207407407407408,
"grad_norm": 9.0,
"learning_rate": 1.817934805211976e-06,
"loss": 1.2841,
"step": 277
},
{
"epoch": 0.8237037037037037,
"grad_norm": 8.6875,
"learning_rate": 1.761536945619684e-06,
"loss": 1.2261,
"step": 278
},
{
"epoch": 0.8266666666666667,
"grad_norm": 9.5625,
"learning_rate": 1.7059431454979825e-06,
"loss": 1.364,
"step": 279
},
{
"epoch": 0.8296296296296296,
"grad_norm": 9.125,
"learning_rate": 1.6511588307037596e-06,
"loss": 1.426,
"step": 280
},
{
"epoch": 0.8325925925925926,
"grad_norm": 9.625,
"learning_rate": 1.5971893480895583e-06,
"loss": 1.458,
"step": 281
},
{
"epoch": 0.8355555555555556,
"grad_norm": 8.875,
"learning_rate": 1.5440399649817384e-06,
"loss": 1.4178,
"step": 282
},
{
"epoch": 0.8385185185185186,
"grad_norm": 9.0625,
"learning_rate": 1.4917158686663992e-06,
"loss": 1.3404,
"step": 283
},
{
"epoch": 0.8414814814814815,
"grad_norm": 8.75,
"learning_rate": 1.4402221658830963e-06,
"loss": 1.3145,
"step": 284
},
{
"epoch": 0.8444444444444444,
"grad_norm": 9.3125,
"learning_rate": 1.3895638823264447e-06,
"loss": 1.2529,
"step": 285
},
{
"epoch": 0.8474074074074074,
"grad_norm": 10.6875,
"learning_rate": 1.339745962155613e-06,
"loss": 1.5298,
"step": 286
},
{
"epoch": 0.8503703703703703,
"grad_norm": 8.75,
"learning_rate": 1.2907732675117878e-06,
"loss": 1.2976,
"step": 287
},
{
"epoch": 0.8533333333333334,
"grad_norm": 9.875,
"learning_rate": 1.2426505780436326e-06,
"loss": 1.3695,
"step": 288
},
{
"epoch": 0.8562962962962963,
"grad_norm": 8.75,
"learning_rate": 1.1953825904408033e-06,
"loss": 1.2496,
"step": 289
},
{
"epoch": 0.8592592592592593,
"grad_norm": 9.8125,
"learning_rate": 1.1489739179755622e-06,
"loss": 1.4167,
"step": 290
},
{
"epoch": 0.8622222222222222,
"grad_norm": 9.5,
"learning_rate": 1.1034290900525279e-06,
"loss": 1.3485,
"step": 291
},
{
"epoch": 0.8651851851851852,
"grad_norm": 8.9375,
"learning_rate": 1.0587525517666142e-06,
"loss": 1.2987,
"step": 292
},
{
"epoch": 0.8681481481481481,
"grad_norm": 9.3125,
"learning_rate": 1.0149486634692019e-06,
"loss": 1.2708,
"step": 293
},
{
"epoch": 0.8711111111111111,
"grad_norm": 9.0625,
"learning_rate": 9.720217003425648e-07,
"loss": 1.405,
"step": 294
},
{
"epoch": 0.8740740740740741,
"grad_norm": 9.125,
"learning_rate": 9.299758519826274e-07,
"loss": 1.3643,
"step": 295
},
{
"epoch": 0.8770370370370371,
"grad_norm": 9.125,
"learning_rate": 8.888152219900692e-07,
"loss": 1.33,
"step": 296
},
{
"epoch": 0.88,
"grad_norm": 9.6875,
"learning_rate": 8.485438275698154e-07,
"loss": 1.4734,
"step": 297
},
{
"epoch": 0.882962962962963,
"grad_norm": 9.75,
"learning_rate": 8.091655991389668e-07,
"loss": 1.3231,
"step": 298
},
{
"epoch": 0.8859259259259259,
"grad_norm": 8.9375,
"learning_rate": 7.706843799431985e-07,
"loss": 1.2256,
"step": 299
},
{
"epoch": 0.8888888888888888,
"grad_norm": 9.6875,
"learning_rate": 7.331039256816664e-07,
"loss": 1.3831,
"step": 300
},
{
"epoch": 0.8918518518518519,
"grad_norm": 8.9375,
"learning_rate": 6.964279041404553e-07,
"loss": 1.2743,
"step": 301
},
{
"epoch": 0.8948148148148148,
"grad_norm": 9.1875,
"learning_rate": 6.606598948346132e-07,
"loss": 1.3141,
"step": 302
},
{
"epoch": 0.8977777777777778,
"grad_norm": 9.8125,
"learning_rate": 6.258033886587911e-07,
"loss": 1.4539,
"step": 303
},
{
"epoch": 0.9007407407407407,
"grad_norm": 10.3125,
"learning_rate": 5.918617875465449e-07,
"loss": 1.4409,
"step": 304
},
{
"epoch": 0.9037037037037037,
"grad_norm": 9.9375,
"learning_rate": 5.588384041383089e-07,
"loss": 1.3793,
"step": 305
},
{
"epoch": 0.9066666666666666,
"grad_norm": 9.8125,
"learning_rate": 5.267364614580861e-07,
"loss": 1.391,
"step": 306
},
{
"epoch": 0.9096296296296297,
"grad_norm": 8.9375,
"learning_rate": 4.955590925988896e-07,
"loss": 1.2844,
"step": 307
},
{
"epoch": 0.9125925925925926,
"grad_norm": 9.6875,
"learning_rate": 4.653093404169573e-07,
"loss": 1.3854,
"step": 308
},
{
"epoch": 0.9155555555555556,
"grad_norm": 9.9375,
"learning_rate": 4.359901572347758e-07,
"loss": 1.378,
"step": 309
},
{
"epoch": 0.9185185185185185,
"grad_norm": 10.0625,
"learning_rate": 4.0760440455293703e-07,
"loss": 1.4957,
"step": 310
},
{
"epoch": 0.9214814814814815,
"grad_norm": 10.0625,
"learning_rate": 3.801548527708621e-07,
"loss": 1.518,
"step": 311
},
{
"epoch": 0.9244444444444444,
"grad_norm": 9.1875,
"learning_rate": 3.5364418091641374e-07,
"loss": 1.2608,
"step": 312
},
{
"epoch": 0.9274074074074075,
"grad_norm": 9.5625,
"learning_rate": 3.280749763844293e-07,
"loss": 1.3896,
"step": 313
},
{
"epoch": 0.9303703703703704,
"grad_norm": 9.4375,
"learning_rate": 3.034497346841958e-07,
"loss": 1.4369,
"step": 314
},
{
"epoch": 0.9333333333333333,
"grad_norm": 9.0,
"learning_rate": 2.7977085919589253e-07,
"loss": 1.4707,
"step": 315
},
{
"epoch": 0.9362962962962963,
"grad_norm": 8.875,
"learning_rate": 2.570406609360221e-07,
"loss": 1.2772,
"step": 316
},
{
"epoch": 0.9392592592592592,
"grad_norm": 9.6875,
"learning_rate": 2.3526135833186527e-07,
"loss": 1.3784,
"step": 317
},
{
"epoch": 0.9422222222222222,
"grad_norm": 9.3125,
"learning_rate": 2.1443507700495968e-07,
"loss": 1.3823,
"step": 318
},
{
"epoch": 0.9451851851851852,
"grad_norm": 9.375,
"learning_rate": 1.9456384956365149e-07,
"loss": 1.3478,
"step": 319
},
{
"epoch": 0.9481481481481482,
"grad_norm": 9.875,
"learning_rate": 1.7564961540470492e-07,
"loss": 1.3708,
"step": 320
},
{
"epoch": 0.9511111111111111,
"grad_norm": 9.125,
"learning_rate": 1.5769422052403172e-07,
"loss": 1.2729,
"step": 321
},
{
"epoch": 0.9540740740740741,
"grad_norm": 9.3125,
"learning_rate": 1.4069941733651948e-07,
"loss": 1.3488,
"step": 322
},
{
"epoch": 0.957037037037037,
"grad_norm": 9.0,
"learning_rate": 1.2466686450499866e-07,
"loss": 1.3308,
"step": 323
},
{
"epoch": 0.96,
"grad_norm": 9.1875,
"learning_rate": 1.0959812677835968e-07,
"loss": 1.3702,
"step": 324
},
{
"epoch": 0.9629629629629629,
"grad_norm": 9.125,
"learning_rate": 9.549467483884412e-08,
"loss": 1.2894,
"step": 325
},
{
"epoch": 0.965925925925926,
"grad_norm": 9.6875,
"learning_rate": 8.235788515849607e-08,
"loss": 1.4597,
"step": 326
},
{
"epoch": 0.9688888888888889,
"grad_norm": 8.9375,
"learning_rate": 7.018903986483083e-08,
"loss": 1.3452,
"step": 327
},
{
"epoch": 0.9718518518518519,
"grad_norm": 9.1875,
"learning_rate": 5.89893266156949e-08,
"loss": 1.3317,
"step": 328
},
{
"epoch": 0.9748148148148148,
"grad_norm": 9.9375,
"learning_rate": 4.8759838483358745e-08,
"loss": 1.325,
"step": 329
},
{
"epoch": 0.9777777777777777,
"grad_norm": 9.125,
"learning_rate": 3.950157384783104e-08,
"loss": 1.3164,
"step": 330
},
{
"epoch": 0.9807407407407407,
"grad_norm": 9.4375,
"learning_rate": 3.12154362994177e-08,
"loss": 1.4195,
"step": 331
},
{
"epoch": 0.9837037037037037,
"grad_norm": 9.375,
"learning_rate": 2.3902234550536862e-08,
"loss": 1.3961,
"step": 332
},
{
"epoch": 0.9866666666666667,
"grad_norm": 9.1875,
"learning_rate": 1.7562682356786488e-08,
"loss": 1.2901,
"step": 333
},
{
"epoch": 0.9896296296296296,
"grad_norm": 10.0,
"learning_rate": 1.2197398447283404e-08,
"loss": 1.5034,
"step": 334
},
{
"epoch": 0.9925925925925926,
"grad_norm": 10.0625,
"learning_rate": 7.806906464281617e-09,
"loss": 1.4242,
"step": 335
},
{
"epoch": 0.9955555555555555,
"grad_norm": 9.625,
"learning_rate": 4.39163491205652e-09,
"loss": 1.4248,
"step": 336
},
{
"epoch": 0.9985185185185185,
"grad_norm": 9.8125,
"learning_rate": 1.951917115091684e-09,
"loss": 1.3997,
"step": 337
},
{
"epoch": 1.0,
"grad_norm": 13.5625,
"learning_rate": 4.879911855426578e-10,
"loss": 1.3577,
"step": 338
}
],
"logging_steps": 1,
"max_steps": 338,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 10000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5614551709925376.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}