{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.99360341151386,
"eval_steps": 500,
"global_step": 1170,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0042643923240938165,
"grad_norm": 5.762583588325692,
"learning_rate": 3.4188034188034194e-07,
"loss": 0.8544,
"step": 1
},
{
"epoch": 0.008528784648187633,
"grad_norm": 5.762853501660702,
"learning_rate": 6.837606837606839e-07,
"loss": 0.8386,
"step": 2
},
{
"epoch": 0.01279317697228145,
"grad_norm": 5.9677523180556635,
"learning_rate": 1.0256410256410257e-06,
"loss": 0.884,
"step": 3
},
{
"epoch": 0.017057569296375266,
"grad_norm": 5.699941904390049,
"learning_rate": 1.3675213675213678e-06,
"loss": 0.8563,
"step": 4
},
{
"epoch": 0.021321961620469083,
"grad_norm": 5.62168022349983,
"learning_rate": 1.7094017094017097e-06,
"loss": 0.852,
"step": 5
},
{
"epoch": 0.0255863539445629,
"grad_norm": 5.493702356483672,
"learning_rate": 2.0512820512820513e-06,
"loss": 0.8753,
"step": 6
},
{
"epoch": 0.029850746268656716,
"grad_norm": 4.303897748944961,
"learning_rate": 2.3931623931623937e-06,
"loss": 0.8191,
"step": 7
},
{
"epoch": 0.03411513859275053,
"grad_norm": 3.9794175111660945,
"learning_rate": 2.7350427350427355e-06,
"loss": 0.7877,
"step": 8
},
{
"epoch": 0.03837953091684435,
"grad_norm": 2.3816462501227518,
"learning_rate": 3.0769230769230774e-06,
"loss": 0.8057,
"step": 9
},
{
"epoch": 0.042643923240938165,
"grad_norm": 2.0788392465970302,
"learning_rate": 3.4188034188034193e-06,
"loss": 0.7537,
"step": 10
},
{
"epoch": 0.046908315565031986,
"grad_norm": 1.8863166395033435,
"learning_rate": 3.760683760683761e-06,
"loss": 0.7445,
"step": 11
},
{
"epoch": 0.0511727078891258,
"grad_norm": 3.5536033903037505,
"learning_rate": 4.102564102564103e-06,
"loss": 0.7596,
"step": 12
},
{
"epoch": 0.05543710021321962,
"grad_norm": 3.651015786009793,
"learning_rate": 4.444444444444444e-06,
"loss": 0.7396,
"step": 13
},
{
"epoch": 0.05970149253731343,
"grad_norm": 3.5280670262633596,
"learning_rate": 4.786324786324787e-06,
"loss": 0.7307,
"step": 14
},
{
"epoch": 0.06396588486140725,
"grad_norm": 3.1262409040889914,
"learning_rate": 5.128205128205128e-06,
"loss": 0.7104,
"step": 15
},
{
"epoch": 0.06823027718550106,
"grad_norm": 2.89142473527489,
"learning_rate": 5.470085470085471e-06,
"loss": 0.6875,
"step": 16
},
{
"epoch": 0.07249466950959488,
"grad_norm": 2.4647987498160244,
"learning_rate": 5.8119658119658126e-06,
"loss": 0.6755,
"step": 17
},
{
"epoch": 0.0767590618336887,
"grad_norm": 1.8250668337679865,
"learning_rate": 6.153846153846155e-06,
"loss": 0.6897,
"step": 18
},
{
"epoch": 0.08102345415778252,
"grad_norm": 1.4024377137744832,
"learning_rate": 6.495726495726496e-06,
"loss": 0.6613,
"step": 19
},
{
"epoch": 0.08528784648187633,
"grad_norm": 1.3704415928007279,
"learning_rate": 6.837606837606839e-06,
"loss": 0.6393,
"step": 20
},
{
"epoch": 0.08955223880597014,
"grad_norm": 1.6980228810730489,
"learning_rate": 7.17948717948718e-06,
"loss": 0.6399,
"step": 21
},
{
"epoch": 0.09381663113006397,
"grad_norm": 1.6402395462904322,
"learning_rate": 7.521367521367522e-06,
"loss": 0.6195,
"step": 22
},
{
"epoch": 0.09808102345415778,
"grad_norm": 1.2780132829684268,
"learning_rate": 7.863247863247863e-06,
"loss": 0.6235,
"step": 23
},
{
"epoch": 0.1023454157782516,
"grad_norm": 1.023680566259576,
"learning_rate": 8.205128205128205e-06,
"loss": 0.6236,
"step": 24
},
{
"epoch": 0.10660980810234541,
"grad_norm": 1.0592925294970703,
"learning_rate": 8.547008547008548e-06,
"loss": 0.6317,
"step": 25
},
{
"epoch": 0.11087420042643924,
"grad_norm": 1.2196353358103003,
"learning_rate": 8.888888888888888e-06,
"loss": 0.6061,
"step": 26
},
{
"epoch": 0.11513859275053305,
"grad_norm": 0.9600456404562788,
"learning_rate": 9.230769230769232e-06,
"loss": 0.6225,
"step": 27
},
{
"epoch": 0.11940298507462686,
"grad_norm": 0.6975540635958875,
"learning_rate": 9.572649572649575e-06,
"loss": 0.5981,
"step": 28
},
{
"epoch": 0.12366737739872068,
"grad_norm": 0.9833632649277261,
"learning_rate": 9.914529914529915e-06,
"loss": 0.5908,
"step": 29
},
{
"epoch": 0.1279317697228145,
"grad_norm": 0.9787682562634072,
"learning_rate": 1.0256410256410256e-05,
"loss": 0.5978,
"step": 30
},
{
"epoch": 0.13219616204690832,
"grad_norm": 0.7131374700897437,
"learning_rate": 1.05982905982906e-05,
"loss": 0.5638,
"step": 31
},
{
"epoch": 0.13646055437100213,
"grad_norm": 0.776325884804065,
"learning_rate": 1.0940170940170942e-05,
"loss": 0.5913,
"step": 32
},
{
"epoch": 0.14072494669509594,
"grad_norm": 0.8028754922062066,
"learning_rate": 1.1282051282051283e-05,
"loss": 0.6077,
"step": 33
},
{
"epoch": 0.14498933901918976,
"grad_norm": 0.765715406571868,
"learning_rate": 1.1623931623931625e-05,
"loss": 0.5749,
"step": 34
},
{
"epoch": 0.14925373134328357,
"grad_norm": 0.6112531001869583,
"learning_rate": 1.1965811965811966e-05,
"loss": 0.5503,
"step": 35
},
{
"epoch": 0.1535181236673774,
"grad_norm": 0.6194576345031709,
"learning_rate": 1.230769230769231e-05,
"loss": 0.5765,
"step": 36
},
{
"epoch": 0.15778251599147122,
"grad_norm": 0.6976498059588975,
"learning_rate": 1.264957264957265e-05,
"loss": 0.5592,
"step": 37
},
{
"epoch": 0.16204690831556504,
"grad_norm": 0.5878952789888727,
"learning_rate": 1.2991452991452993e-05,
"loss": 0.553,
"step": 38
},
{
"epoch": 0.16631130063965885,
"grad_norm": 0.721834531098246,
"learning_rate": 1.3333333333333333e-05,
"loss": 0.5866,
"step": 39
},
{
"epoch": 0.17057569296375266,
"grad_norm": 0.572927984090807,
"learning_rate": 1.3675213675213677e-05,
"loss": 0.5439,
"step": 40
},
{
"epoch": 0.17484008528784648,
"grad_norm": 0.5998067556843685,
"learning_rate": 1.4017094017094018e-05,
"loss": 0.5702,
"step": 41
},
{
"epoch": 0.1791044776119403,
"grad_norm": 0.619560266999832,
"learning_rate": 1.435897435897436e-05,
"loss": 0.5709,
"step": 42
},
{
"epoch": 0.18336886993603413,
"grad_norm": 0.5985922697985028,
"learning_rate": 1.4700854700854703e-05,
"loss": 0.5607,
"step": 43
},
{
"epoch": 0.18763326226012794,
"grad_norm": 0.6755231307052864,
"learning_rate": 1.5042735042735043e-05,
"loss": 0.591,
"step": 44
},
{
"epoch": 0.19189765458422176,
"grad_norm": 0.5611610809137622,
"learning_rate": 1.5384615384615387e-05,
"loss": 0.557,
"step": 45
},
{
"epoch": 0.19616204690831557,
"grad_norm": 0.6547322304669131,
"learning_rate": 1.5726495726495726e-05,
"loss": 0.5436,
"step": 46
},
{
"epoch": 0.20042643923240938,
"grad_norm": 0.5654181199007654,
"learning_rate": 1.6068376068376072e-05,
"loss": 0.5555,
"step": 47
},
{
"epoch": 0.2046908315565032,
"grad_norm": 0.6043389720348237,
"learning_rate": 1.641025641025641e-05,
"loss": 0.5219,
"step": 48
},
{
"epoch": 0.208955223880597,
"grad_norm": 0.604378677169402,
"learning_rate": 1.6752136752136753e-05,
"loss": 0.5316,
"step": 49
},
{
"epoch": 0.21321961620469082,
"grad_norm": 0.6156791403933493,
"learning_rate": 1.7094017094017095e-05,
"loss": 0.5806,
"step": 50
},
{
"epoch": 0.21748400852878466,
"grad_norm": 0.5311982638264362,
"learning_rate": 1.7435897435897438e-05,
"loss": 0.5119,
"step": 51
},
{
"epoch": 0.22174840085287847,
"grad_norm": 0.6806043408167816,
"learning_rate": 1.7777777777777777e-05,
"loss": 0.5427,
"step": 52
},
{
"epoch": 0.2260127931769723,
"grad_norm": 0.5268262684314252,
"learning_rate": 1.8119658119658122e-05,
"loss": 0.5113,
"step": 53
},
{
"epoch": 0.2302771855010661,
"grad_norm": 0.5125041865303465,
"learning_rate": 1.8461538461538465e-05,
"loss": 0.5527,
"step": 54
},
{
"epoch": 0.2345415778251599,
"grad_norm": 0.5544301707286969,
"learning_rate": 1.8803418803418804e-05,
"loss": 0.5293,
"step": 55
},
{
"epoch": 0.23880597014925373,
"grad_norm": 0.5776950245039686,
"learning_rate": 1.914529914529915e-05,
"loss": 0.5365,
"step": 56
},
{
"epoch": 0.24307036247334754,
"grad_norm": 0.5419276025054381,
"learning_rate": 1.9487179487179488e-05,
"loss": 0.5159,
"step": 57
},
{
"epoch": 0.24733475479744135,
"grad_norm": 0.587019298382551,
"learning_rate": 1.982905982905983e-05,
"loss": 0.5579,
"step": 58
},
{
"epoch": 0.2515991471215352,
"grad_norm": 0.558245566849885,
"learning_rate": 2.0170940170940173e-05,
"loss": 0.5227,
"step": 59
},
{
"epoch": 0.255863539445629,
"grad_norm": 0.5636197758898119,
"learning_rate": 2.0512820512820512e-05,
"loss": 0.5242,
"step": 60
},
{
"epoch": 0.2601279317697228,
"grad_norm": 0.5819473915079625,
"learning_rate": 2.0854700854700857e-05,
"loss": 0.4985,
"step": 61
},
{
"epoch": 0.26439232409381663,
"grad_norm": 0.6585083765912015,
"learning_rate": 2.11965811965812e-05,
"loss": 0.5361,
"step": 62
},
{
"epoch": 0.26865671641791045,
"grad_norm": 0.6369197447630097,
"learning_rate": 2.153846153846154e-05,
"loss": 0.5278,
"step": 63
},
{
"epoch": 0.27292110874200426,
"grad_norm": 0.6947872378280153,
"learning_rate": 2.1880341880341884e-05,
"loss": 0.5544,
"step": 64
},
{
"epoch": 0.2771855010660981,
"grad_norm": 0.7348638961603595,
"learning_rate": 2.2222222222222227e-05,
"loss": 0.5345,
"step": 65
},
{
"epoch": 0.2814498933901919,
"grad_norm": 0.5897299352701553,
"learning_rate": 2.2564102564102566e-05,
"loss": 0.494,
"step": 66
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.4833992909133447,
"learning_rate": 2.2905982905982908e-05,
"loss": 0.508,
"step": 67
},
{
"epoch": 0.2899786780383795,
"grad_norm": 0.9014759038348941,
"learning_rate": 2.324786324786325e-05,
"loss": 0.541,
"step": 68
},
{
"epoch": 0.2942430703624733,
"grad_norm": 0.766201358388276,
"learning_rate": 2.3589743589743593e-05,
"loss": 0.511,
"step": 69
},
{
"epoch": 0.29850746268656714,
"grad_norm": 0.6763470514557186,
"learning_rate": 2.393162393162393e-05,
"loss": 0.5069,
"step": 70
},
{
"epoch": 0.302771855010661,
"grad_norm": 0.7960304510774352,
"learning_rate": 2.4273504273504274e-05,
"loss": 0.5029,
"step": 71
},
{
"epoch": 0.3070362473347548,
"grad_norm": 0.666138666582889,
"learning_rate": 2.461538461538462e-05,
"loss": 0.5145,
"step": 72
},
{
"epoch": 0.31130063965884863,
"grad_norm": 0.5806980890439299,
"learning_rate": 2.495726495726496e-05,
"loss": 0.5213,
"step": 73
},
{
"epoch": 0.31556503198294245,
"grad_norm": 0.6186803475054912,
"learning_rate": 2.52991452991453e-05,
"loss": 0.5092,
"step": 74
},
{
"epoch": 0.31982942430703626,
"grad_norm": 0.737335241924766,
"learning_rate": 2.5641025641025646e-05,
"loss": 0.5408,
"step": 75
},
{
"epoch": 0.32409381663113007,
"grad_norm": 0.7943077140830255,
"learning_rate": 2.5982905982905985e-05,
"loss": 0.5693,
"step": 76
},
{
"epoch": 0.3283582089552239,
"grad_norm": 0.7311735920812683,
"learning_rate": 2.6324786324786328e-05,
"loss": 0.5214,
"step": 77
},
{
"epoch": 0.3326226012793177,
"grad_norm": 0.650892143783386,
"learning_rate": 2.6666666666666667e-05,
"loss": 0.5414,
"step": 78
},
{
"epoch": 0.3368869936034115,
"grad_norm": 0.8822230320163791,
"learning_rate": 2.7008547008547012e-05,
"loss": 0.523,
"step": 79
},
{
"epoch": 0.3411513859275053,
"grad_norm": 0.8072091437291843,
"learning_rate": 2.7350427350427355e-05,
"loss": 0.5305,
"step": 80
},
{
"epoch": 0.34541577825159914,
"grad_norm": 0.8096540761124144,
"learning_rate": 2.7692307692307694e-05,
"loss": 0.5179,
"step": 81
},
{
"epoch": 0.34968017057569295,
"grad_norm": 0.7668789963017784,
"learning_rate": 2.8034188034188036e-05,
"loss": 0.533,
"step": 82
},
{
"epoch": 0.35394456289978676,
"grad_norm": 0.8780947367628191,
"learning_rate": 2.8376068376068378e-05,
"loss": 0.5135,
"step": 83
},
{
"epoch": 0.3582089552238806,
"grad_norm": 0.8644912878031398,
"learning_rate": 2.871794871794872e-05,
"loss": 0.518,
"step": 84
},
{
"epoch": 0.3624733475479744,
"grad_norm": 0.8027366679366013,
"learning_rate": 2.9059829059829063e-05,
"loss": 0.5383,
"step": 85
},
{
"epoch": 0.36673773987206826,
"grad_norm": 0.7951556664027158,
"learning_rate": 2.9401709401709405e-05,
"loss": 0.5177,
"step": 86
},
{
"epoch": 0.37100213219616207,
"grad_norm": 1.0886293807161818,
"learning_rate": 2.9743589743589747e-05,
"loss": 0.5089,
"step": 87
},
{
"epoch": 0.3752665245202559,
"grad_norm": 0.9060369709344223,
"learning_rate": 3.0085470085470086e-05,
"loss": 0.4971,
"step": 88
},
{
"epoch": 0.3795309168443497,
"grad_norm": 0.8715123131973772,
"learning_rate": 3.042735042735043e-05,
"loss": 0.5417,
"step": 89
},
{
"epoch": 0.3837953091684435,
"grad_norm": 1.0361588473316101,
"learning_rate": 3.0769230769230774e-05,
"loss": 0.4945,
"step": 90
},
{
"epoch": 0.3880597014925373,
"grad_norm": 0.816413638766369,
"learning_rate": 3.111111111111112e-05,
"loss": 0.5044,
"step": 91
},
{
"epoch": 0.39232409381663114,
"grad_norm": 0.9700049358178698,
"learning_rate": 3.145299145299145e-05,
"loss": 0.5202,
"step": 92
},
{
"epoch": 0.39658848614072495,
"grad_norm": 1.268167713035034,
"learning_rate": 3.1794871794871795e-05,
"loss": 0.5295,
"step": 93
},
{
"epoch": 0.40085287846481876,
"grad_norm": 0.7278122145603797,
"learning_rate": 3.2136752136752144e-05,
"loss": 0.5043,
"step": 94
},
{
"epoch": 0.4051172707889126,
"grad_norm": 1.5360813524930013,
"learning_rate": 3.247863247863248e-05,
"loss": 0.5026,
"step": 95
},
{
"epoch": 0.4093816631130064,
"grad_norm": 0.9753666203897736,
"learning_rate": 3.282051282051282e-05,
"loss": 0.5188,
"step": 96
},
{
"epoch": 0.4136460554371002,
"grad_norm": 1.4100688600112532,
"learning_rate": 3.316239316239317e-05,
"loss": 0.5144,
"step": 97
},
{
"epoch": 0.417910447761194,
"grad_norm": 1.1259322179404374,
"learning_rate": 3.3504273504273506e-05,
"loss": 0.507,
"step": 98
},
{
"epoch": 0.42217484008528783,
"grad_norm": 1.2305441221747766,
"learning_rate": 3.384615384615385e-05,
"loss": 0.5241,
"step": 99
},
{
"epoch": 0.42643923240938164,
"grad_norm": 1.2748472488214255,
"learning_rate": 3.418803418803419e-05,
"loss": 0.5124,
"step": 100
},
{
"epoch": 0.43070362473347545,
"grad_norm": 0.9865825760085118,
"learning_rate": 3.452991452991453e-05,
"loss": 0.5106,
"step": 101
},
{
"epoch": 0.4349680170575693,
"grad_norm": 0.9763228928414522,
"learning_rate": 3.4871794871794875e-05,
"loss": 0.4896,
"step": 102
},
{
"epoch": 0.43923240938166314,
"grad_norm": 1.1225185277789729,
"learning_rate": 3.521367521367522e-05,
"loss": 0.4981,
"step": 103
},
{
"epoch": 0.44349680170575695,
"grad_norm": 1.1094174750241743,
"learning_rate": 3.555555555555555e-05,
"loss": 0.5062,
"step": 104
},
{
"epoch": 0.44776119402985076,
"grad_norm": 0.898347373542904,
"learning_rate": 3.58974358974359e-05,
"loss": 0.5007,
"step": 105
},
{
"epoch": 0.4520255863539446,
"grad_norm": 0.773624200108943,
"learning_rate": 3.6239316239316245e-05,
"loss": 0.5025,
"step": 106
},
{
"epoch": 0.4562899786780384,
"grad_norm": 1.0750778962974719,
"learning_rate": 3.658119658119658e-05,
"loss": 0.5208,
"step": 107
},
{
"epoch": 0.4605543710021322,
"grad_norm": 0.9050953116899519,
"learning_rate": 3.692307692307693e-05,
"loss": 0.5276,
"step": 108
},
{
"epoch": 0.464818763326226,
"grad_norm": 0.7021746088531706,
"learning_rate": 3.726495726495727e-05,
"loss": 0.5052,
"step": 109
},
{
"epoch": 0.4690831556503198,
"grad_norm": 0.7199013432957416,
"learning_rate": 3.760683760683761e-05,
"loss": 0.5255,
"step": 110
},
{
"epoch": 0.47334754797441364,
"grad_norm": 1.1160990501561305,
"learning_rate": 3.794871794871795e-05,
"loss": 0.4983,
"step": 111
},
{
"epoch": 0.47761194029850745,
"grad_norm": 1.0555937817011538,
"learning_rate": 3.82905982905983e-05,
"loss": 0.5124,
"step": 112
},
{
"epoch": 0.48187633262260127,
"grad_norm": 1.0150732063928598,
"learning_rate": 3.8632478632478634e-05,
"loss": 0.4883,
"step": 113
},
{
"epoch": 0.4861407249466951,
"grad_norm": 0.8697583364801937,
"learning_rate": 3.8974358974358976e-05,
"loss": 0.5292,
"step": 114
},
{
"epoch": 0.4904051172707889,
"grad_norm": 0.9185038001613525,
"learning_rate": 3.931623931623932e-05,
"loss": 0.5174,
"step": 115
},
{
"epoch": 0.4946695095948827,
"grad_norm": 0.9745755865848704,
"learning_rate": 3.965811965811966e-05,
"loss": 0.5147,
"step": 116
},
{
"epoch": 0.4989339019189765,
"grad_norm": 0.6983551544056755,
"learning_rate": 4e-05,
"loss": 0.5027,
"step": 117
},
{
"epoch": 0.5031982942430704,
"grad_norm": 0.9208847354860332,
"learning_rate": 3.9999910989204816e-05,
"loss": 0.5286,
"step": 118
},
{
"epoch": 0.5074626865671642,
"grad_norm": 1.2397233900658762,
"learning_rate": 3.999964395761154e-05,
"loss": 0.5085,
"step": 119
},
{
"epoch": 0.511727078891258,
"grad_norm": 0.7942041370107185,
"learning_rate": 3.9999198907597046e-05,
"loss": 0.5293,
"step": 120
},
{
"epoch": 0.5159914712153518,
"grad_norm": 1.5080290506230445,
"learning_rate": 3.999857584312277e-05,
"loss": 0.5222,
"step": 121
},
{
"epoch": 0.5202558635394456,
"grad_norm": 0.9438008695990223,
"learning_rate": 3.9997774769734646e-05,
"loss": 0.5076,
"step": 122
},
{
"epoch": 0.5245202558635395,
"grad_norm": 1.2363148275202405,
"learning_rate": 3.9996795694563096e-05,
"loss": 0.5265,
"step": 123
},
{
"epoch": 0.5287846481876333,
"grad_norm": 0.8260779515257772,
"learning_rate": 3.999563862632295e-05,
"loss": 0.5052,
"step": 124
},
{
"epoch": 0.5330490405117271,
"grad_norm": 1.0383706258239225,
"learning_rate": 3.9994303575313356e-05,
"loss": 0.4833,
"step": 125
},
{
"epoch": 0.5373134328358209,
"grad_norm": 1.0731088546315215,
"learning_rate": 3.999279055341771e-05,
"loss": 0.5088,
"step": 126
},
{
"epoch": 0.5415778251599147,
"grad_norm": 0.8305037164926722,
"learning_rate": 3.9991099574103544e-05,
"loss": 0.4969,
"step": 127
},
{
"epoch": 0.5458422174840085,
"grad_norm": 1.1543261349619998,
"learning_rate": 3.9989230652422407e-05,
"loss": 0.4858,
"step": 128
},
{
"epoch": 0.5501066098081023,
"grad_norm": 0.911733958800851,
"learning_rate": 3.998718380500971e-05,
"loss": 0.4867,
"step": 129
},
{
"epoch": 0.5543710021321961,
"grad_norm": 0.9484447965879755,
"learning_rate": 3.998495905008461e-05,
"loss": 0.5108,
"step": 130
},
{
"epoch": 0.55863539445629,
"grad_norm": 0.7766564927463866,
"learning_rate": 3.9982556407449816e-05,
"loss": 0.4779,
"step": 131
},
{
"epoch": 0.5628997867803838,
"grad_norm": 0.8227852649914328,
"learning_rate": 3.997997589849145e-05,
"loss": 0.488,
"step": 132
},
{
"epoch": 0.5671641791044776,
"grad_norm": 0.9370342689098532,
"learning_rate": 3.9977217546178825e-05,
"loss": 0.5293,
"step": 133
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.8852132441099798,
"learning_rate": 3.997428137506426e-05,
"loss": 0.4949,
"step": 134
},
{
"epoch": 0.5756929637526652,
"grad_norm": 0.7412646580333639,
"learning_rate": 3.9971167411282835e-05,
"loss": 0.5054,
"step": 135
},
{
"epoch": 0.579957356076759,
"grad_norm": 0.751878959140086,
"learning_rate": 3.9967875682552206e-05,
"loss": 0.5219,
"step": 136
},
{
"epoch": 0.5842217484008528,
"grad_norm": 0.6519295683576232,
"learning_rate": 3.9964406218172305e-05,
"loss": 0.5182,
"step": 137
},
{
"epoch": 0.5884861407249466,
"grad_norm": 0.6755524662705334,
"learning_rate": 3.99607590490251e-05,
"loss": 0.5039,
"step": 138
},
{
"epoch": 0.5927505330490405,
"grad_norm": 0.8347488695469496,
"learning_rate": 3.995693420757435e-05,
"loss": 0.4946,
"step": 139
},
{
"epoch": 0.5970149253731343,
"grad_norm": 0.9427351754391033,
"learning_rate": 3.9952931727865264e-05,
"loss": 0.5128,
"step": 140
},
{
"epoch": 0.6012793176972282,
"grad_norm": 1.055031036692266,
"learning_rate": 3.9948751645524235e-05,
"loss": 0.4946,
"step": 141
},
{
"epoch": 0.605543710021322,
"grad_norm": 1.23006352166961,
"learning_rate": 3.994439399775851e-05,
"loss": 0.522,
"step": 142
},
{
"epoch": 0.6098081023454158,
"grad_norm": 0.9715538904791429,
"learning_rate": 3.993985882335584e-05,
"loss": 0.5226,
"step": 143
},
{
"epoch": 0.6140724946695096,
"grad_norm": 0.7501049697251858,
"learning_rate": 3.9935146162684206e-05,
"loss": 0.493,
"step": 144
},
{
"epoch": 0.6183368869936035,
"grad_norm": 0.8701421851852503,
"learning_rate": 3.993025605769135e-05,
"loss": 0.4963,
"step": 145
},
{
"epoch": 0.6226012793176973,
"grad_norm": 1.2467070738538217,
"learning_rate": 3.992518855190449e-05,
"loss": 0.4952,
"step": 146
},
{
"epoch": 0.6268656716417911,
"grad_norm": 0.8206473558009,
"learning_rate": 3.9919943690429906e-05,
"loss": 0.5246,
"step": 147
},
{
"epoch": 0.6311300639658849,
"grad_norm": 1.095920307115201,
"learning_rate": 3.991452151995252e-05,
"loss": 0.489,
"step": 148
},
{
"epoch": 0.6353944562899787,
"grad_norm": 0.9862679683535909,
"learning_rate": 3.9908922088735504e-05,
"loss": 0.5258,
"step": 149
},
{
"epoch": 0.6396588486140725,
"grad_norm": 1.0605259536794247,
"learning_rate": 3.9903145446619837e-05,
"loss": 0.4901,
"step": 150
},
{
"epoch": 0.6439232409381663,
"grad_norm": 0.9963084800854008,
"learning_rate": 3.989719164502388e-05,
"loss": 0.4998,
"step": 151
},
{
"epoch": 0.6481876332622601,
"grad_norm": 0.9149684222988724,
"learning_rate": 3.989106073694289e-05,
"loss": 0.5103,
"step": 152
},
{
"epoch": 0.652452025586354,
"grad_norm": 0.9657524344033639,
"learning_rate": 3.9884752776948564e-05,
"loss": 0.5296,
"step": 153
},
{
"epoch": 0.6567164179104478,
"grad_norm": 0.8189190963813775,
"learning_rate": 3.987826782118855e-05,
"loss": 0.497,
"step": 154
},
{
"epoch": 0.6609808102345416,
"grad_norm": 0.8044373372459764,
"learning_rate": 3.9871605927385976e-05,
"loss": 0.5089,
"step": 155
},
{
"epoch": 0.6652452025586354,
"grad_norm": 0.7698309044015865,
"learning_rate": 3.9864767154838864e-05,
"loss": 0.4952,
"step": 156
},
{
"epoch": 0.6695095948827292,
"grad_norm": 0.9001207944326993,
"learning_rate": 3.985775156441968e-05,
"loss": 0.4991,
"step": 157
},
{
"epoch": 0.673773987206823,
"grad_norm": 0.6240493911350159,
"learning_rate": 3.985055921857477e-05,
"loss": 0.4973,
"step": 158
},
{
"epoch": 0.6780383795309168,
"grad_norm": 0.8228062170958025,
"learning_rate": 3.9843190181323744e-05,
"loss": 0.5416,
"step": 159
},
{
"epoch": 0.6823027718550106,
"grad_norm": 0.7864730645574066,
"learning_rate": 3.9835644518259015e-05,
"loss": 0.5052,
"step": 160
},
{
"epoch": 0.6865671641791045,
"grad_norm": 0.7692469509420303,
"learning_rate": 3.982792229654512e-05,
"loss": 0.4863,
"step": 161
},
{
"epoch": 0.6908315565031983,
"grad_norm": 0.809929218928369,
"learning_rate": 3.982002358491817e-05,
"loss": 0.4896,
"step": 162
},
{
"epoch": 0.6950959488272921,
"grad_norm": 0.8299232929459466,
"learning_rate": 3.981194845368523e-05,
"loss": 0.5059,
"step": 163
},
{
"epoch": 0.6993603411513859,
"grad_norm": 1.429255919088972,
"learning_rate": 3.980369697472367e-05,
"loss": 0.5099,
"step": 164
},
{
"epoch": 0.7036247334754797,
"grad_norm": 0.872846079343231,
"learning_rate": 3.979526922148058e-05,
"loss": 0.518,
"step": 165
},
{
"epoch": 0.7078891257995735,
"grad_norm": 0.7368088843338214,
"learning_rate": 3.978666526897204e-05,
"loss": 0.49,
"step": 166
},
{
"epoch": 0.7121535181236673,
"grad_norm": 0.9306889404234494,
"learning_rate": 3.9777885193782534e-05,
"loss": 0.5197,
"step": 167
},
{
"epoch": 0.7164179104477612,
"grad_norm": 0.9830777592344603,
"learning_rate": 3.9768929074064206e-05,
"loss": 0.5035,
"step": 168
},
{
"epoch": 0.720682302771855,
"grad_norm": 1.9067676890157668,
"learning_rate": 3.9759796989536185e-05,
"loss": 0.504,
"step": 169
},
{
"epoch": 0.7249466950959488,
"grad_norm": 1.2261524546903462,
"learning_rate": 3.975048902148388e-05,
"loss": 0.4878,
"step": 170
},
{
"epoch": 0.7292110874200426,
"grad_norm": 1.2878696480150222,
"learning_rate": 3.9741005252758255e-05,
"loss": 0.4701,
"step": 171
},
{
"epoch": 0.7334754797441365,
"grad_norm": 1.1029539614135186,
"learning_rate": 3.9731345767775105e-05,
"loss": 0.5018,
"step": 172
},
{
"epoch": 0.7377398720682303,
"grad_norm": 1.404966776096553,
"learning_rate": 3.972151065251425e-05,
"loss": 0.4853,
"step": 173
},
{
"epoch": 0.7420042643923241,
"grad_norm": 1.1681953067483049,
"learning_rate": 3.971149999451886e-05,
"loss": 0.4736,
"step": 174
},
{
"epoch": 0.746268656716418,
"grad_norm": 1.054326724521683,
"learning_rate": 3.970131388289456e-05,
"loss": 0.4931,
"step": 175
},
{
"epoch": 0.7505330490405118,
"grad_norm": 1.3228718253060476,
"learning_rate": 3.9690952408308775e-05,
"loss": 0.4908,
"step": 176
},
{
"epoch": 0.7547974413646056,
"grad_norm": 0.8356955846154145,
"learning_rate": 3.9680415662989806e-05,
"loss": 0.4866,
"step": 177
},
{
"epoch": 0.7590618336886994,
"grad_norm": 1.1751465063213684,
"learning_rate": 3.966970374072605e-05,
"loss": 0.5181,
"step": 178
},
{
"epoch": 0.7633262260127932,
"grad_norm": 0.8467287376160608,
"learning_rate": 3.9658816736865183e-05,
"loss": 0.502,
"step": 179
},
{
"epoch": 0.767590618336887,
"grad_norm": 0.9233742749028502,
"learning_rate": 3.9647754748313294e-05,
"loss": 0.4942,
"step": 180
},
{
"epoch": 0.7718550106609808,
"grad_norm": 0.778278596309507,
"learning_rate": 3.9636517873534025e-05,
"loss": 0.5079,
"step": 181
},
{
"epoch": 0.7761194029850746,
"grad_norm": 0.717645374386566,
"learning_rate": 3.9625106212547696e-05,
"loss": 0.5039,
"step": 182
},
{
"epoch": 0.7803837953091685,
"grad_norm": 0.7722997985001351,
"learning_rate": 3.96135198669304e-05,
"loss": 0.4899,
"step": 183
},
{
"epoch": 0.7846481876332623,
"grad_norm": 1.006001772306961,
"learning_rate": 3.960175893981312e-05,
"loss": 0.4909,
"step": 184
},
{
"epoch": 0.7889125799573561,
"grad_norm": 0.6651331133907898,
"learning_rate": 3.958982353588081e-05,
"loss": 0.4992,
"step": 185
},
{
"epoch": 0.7931769722814499,
"grad_norm": 0.9754898318183376,
"learning_rate": 3.957771376137144e-05,
"loss": 0.5103,
"step": 186
},
{
"epoch": 0.7974413646055437,
"grad_norm": 0.5728157064040578,
"learning_rate": 3.956542972407509e-05,
"loss": 0.4951,
"step": 187
},
{
"epoch": 0.8017057569296375,
"grad_norm": 0.7305936260428764,
"learning_rate": 3.955297153333294e-05,
"loss": 0.4786,
"step": 188
},
{
"epoch": 0.8059701492537313,
"grad_norm": 0.6733977139026949,
"learning_rate": 3.954033930003634e-05,
"loss": 0.4862,
"step": 189
},
{
"epoch": 0.8102345415778252,
"grad_norm": 0.5879575682316353,
"learning_rate": 3.952753313662581e-05,
"loss": 0.4995,
"step": 190
},
{
"epoch": 0.814498933901919,
"grad_norm": 0.7661837329326288,
"learning_rate": 3.9514553157090027e-05,
"loss": 0.4989,
"step": 191
},
{
"epoch": 0.8187633262260128,
"grad_norm": 0.700425176093609,
"learning_rate": 3.9501399476964806e-05,
"loss": 0.4638,
"step": 192
},
{
"epoch": 0.8230277185501066,
"grad_norm": 0.8958166865667093,
"learning_rate": 3.948807221333212e-05,
"loss": 0.4872,
"step": 193
},
{
"epoch": 0.8272921108742004,
"grad_norm": 0.8807166064400732,
"learning_rate": 3.947457148481899e-05,
"loss": 0.4934,
"step": 194
},
{
"epoch": 0.8315565031982942,
"grad_norm": 0.6091238805523633,
"learning_rate": 3.946089741159648e-05,
"loss": 0.5032,
"step": 195
},
{
"epoch": 0.835820895522388,
"grad_norm": 0.617575065646615,
"learning_rate": 3.94470501153786e-05,
"loss": 0.4925,
"step": 196
},
{
"epoch": 0.8400852878464818,
"grad_norm": 1.0204579677550079,
"learning_rate": 3.943302971942124e-05,
"loss": 0.4764,
"step": 197
},
{
"epoch": 0.8443496801705757,
"grad_norm": 0.7837781333209204,
"learning_rate": 3.9418836348521045e-05,
"loss": 0.5101,
"step": 198
},
{
"epoch": 0.8486140724946695,
"grad_norm": 0.811890861572903,
"learning_rate": 3.940447012901435e-05,
"loss": 0.4777,
"step": 199
},
{
"epoch": 0.8528784648187633,
"grad_norm": 0.8205952847062596,
"learning_rate": 3.9389931188776016e-05,
"loss": 0.5,
"step": 200
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.9742017513829521,
"learning_rate": 3.937521965721831e-05,
"loss": 0.4791,
"step": 201
},
{
"epoch": 0.8614072494669509,
"grad_norm": 0.8221657370452279,
"learning_rate": 3.9360335665289735e-05,
"loss": 0.4904,
"step": 202
},
{
"epoch": 0.8656716417910447,
"grad_norm": 0.7286486621235604,
"learning_rate": 3.9345279345473894e-05,
"loss": 0.497,
"step": 203
},
{
"epoch": 0.8699360341151386,
"grad_norm": 0.6333922650560874,
"learning_rate": 3.933005083178828e-05,
"loss": 0.4892,
"step": 204
},
{
"epoch": 0.8742004264392325,
"grad_norm": 0.8578052818677135,
"learning_rate": 3.9314650259783116e-05,
"loss": 0.5076,
"step": 205
},
{
"epoch": 0.8784648187633263,
"grad_norm": 0.750399410827557,
"learning_rate": 3.9299077766540106e-05,
"loss": 0.4999,
"step": 206
},
{
"epoch": 0.8827292110874201,
"grad_norm": 0.7444183300691442,
"learning_rate": 3.928333349067125e-05,
"loss": 0.5174,
"step": 207
},
{
"epoch": 0.8869936034115139,
"grad_norm": 0.7390293670816017,
"learning_rate": 3.926741757231761e-05,
"loss": 0.5088,
"step": 208
},
{
"epoch": 0.8912579957356077,
"grad_norm": 0.5895494036218917,
"learning_rate": 3.925133015314804e-05,
"loss": 0.5001,
"step": 209
},
{
"epoch": 0.8955223880597015,
"grad_norm": 0.6960049209107905,
"learning_rate": 3.923507137635792e-05,
"loss": 0.4615,
"step": 210
},
{
"epoch": 0.8997867803837953,
"grad_norm": 0.6037964081497676,
"learning_rate": 3.9218641386667935e-05,
"loss": 0.4833,
"step": 211
},
{
"epoch": 0.9040511727078892,
"grad_norm": 0.6639130289439265,
"learning_rate": 3.920204033032272e-05,
"loss": 0.5172,
"step": 212
},
{
"epoch": 0.908315565031983,
"grad_norm": 0.7469004136077626,
"learning_rate": 3.9185268355089606e-05,
"loss": 0.503,
"step": 213
},
{
"epoch": 0.9125799573560768,
"grad_norm": 0.9525137178233742,
"learning_rate": 3.916832561025727e-05,
"loss": 0.5039,
"step": 214
},
{
"epoch": 0.9168443496801706,
"grad_norm": 0.7093471875210987,
"learning_rate": 3.915121224663443e-05,
"loss": 0.4769,
"step": 215
},
{
"epoch": 0.9211087420042644,
"grad_norm": 0.8092572396863047,
"learning_rate": 3.913392841654851e-05,
"loss": 0.4798,
"step": 216
},
{
"epoch": 0.9253731343283582,
"grad_norm": 0.7715257865453511,
"learning_rate": 3.9116474273844233e-05,
"loss": 0.4801,
"step": 217
},
{
"epoch": 0.929637526652452,
"grad_norm": 0.8072195224151741,
"learning_rate": 3.909884997388234e-05,
"loss": 0.5066,
"step": 218
},
{
"epoch": 0.9339019189765458,
"grad_norm": 0.6747723474026673,
"learning_rate": 3.9081055673538093e-05,
"loss": 0.4869,
"step": 219
},
{
"epoch": 0.9381663113006397,
"grad_norm": 0.5535598548600815,
"learning_rate": 3.90630915312e-05,
"loss": 0.4847,
"step": 220
},
{
"epoch": 0.9424307036247335,
"grad_norm": 0.74876001608516,
"learning_rate": 3.904495770676831e-05,
"loss": 0.4779,
"step": 221
},
{
"epoch": 0.9466950959488273,
"grad_norm": 0.9083484919348656,
"learning_rate": 3.902665436165364e-05,
"loss": 0.4863,
"step": 222
},
{
"epoch": 0.9509594882729211,
"grad_norm": 0.6461488033648031,
"learning_rate": 3.900818165877552e-05,
"loss": 0.4784,
"step": 223
},
{
"epoch": 0.9552238805970149,
"grad_norm": 0.6754320741347062,
"learning_rate": 3.898953976256094e-05,
"loss": 0.472,
"step": 224
},
{
"epoch": 0.9594882729211087,
"grad_norm": 0.9580967189164344,
"learning_rate": 3.897072883894291e-05,
"loss": 0.5014,
"step": 225
},
{
"epoch": 0.9637526652452025,
"grad_norm": 0.6874808198271747,
"learning_rate": 3.895174905535896e-05,
"loss": 0.4874,
"step": 226
},
{
"epoch": 0.9680170575692963,
"grad_norm": 0.6832917066807791,
"learning_rate": 3.893260058074964e-05,
"loss": 0.475,
"step": 227
},
{
"epoch": 0.9722814498933902,
"grad_norm": 0.6575987132879606,
"learning_rate": 3.8913283585557054e-05,
"loss": 0.4826,
"step": 228
},
{
"epoch": 0.976545842217484,
"grad_norm": 0.6835011560464193,
"learning_rate": 3.8893798241723306e-05,
"loss": 0.4756,
"step": 229
},
{
"epoch": 0.9808102345415778,
"grad_norm": 0.6203307688676428,
"learning_rate": 3.8874144722689e-05,
"loss": 0.4854,
"step": 230
},
{
"epoch": 0.9850746268656716,
"grad_norm": 0.6892823863039171,
"learning_rate": 3.885432320339167e-05,
"loss": 0.495,
"step": 231
},
{
"epoch": 0.9893390191897654,
"grad_norm": 0.540532340018327,
"learning_rate": 3.883433386026422e-05,
"loss": 0.4868,
"step": 232
},
{
"epoch": 0.9936034115138592,
"grad_norm": 0.6964599188865301,
"learning_rate": 3.88141768712334e-05,
"loss": 0.4972,
"step": 233
},
{
"epoch": 0.997867803837953,
"grad_norm": 0.5625723596199244,
"learning_rate": 3.879385241571817e-05,
"loss": 0.4927,
"step": 234
},
{
"epoch": 1.0031982942430704,
"grad_norm": 1.020180894743182,
"learning_rate": 3.877336067462812e-05,
"loss": 0.7904,
"step": 235
},
{
"epoch": 1.007462686567164,
"grad_norm": 0.7065695413618637,
"learning_rate": 3.875270183036187e-05,
"loss": 0.4162,
"step": 236
},
{
"epoch": 1.011727078891258,
"grad_norm": 0.723667383252581,
"learning_rate": 3.873187606680543e-05,
"loss": 0.4699,
"step": 237
},
{
"epoch": 1.0159914712153517,
"grad_norm": 0.6883131102397942,
"learning_rate": 3.871088356933059e-05,
"loss": 0.418,
"step": 238
},
{
"epoch": 1.0202558635394456,
"grad_norm": 0.7771256561903841,
"learning_rate": 3.8689724524793224e-05,
"loss": 0.4763,
"step": 239
},
{
"epoch": 1.0245202558635393,
"grad_norm": 0.7452973304187979,
"learning_rate": 3.866839912153168e-05,
"loss": 0.4056,
"step": 240
},
{
"epoch": 1.0287846481876333,
"grad_norm": 0.6256137550373733,
"learning_rate": 3.864690754936506e-05,
"loss": 0.3945,
"step": 241
},
{
"epoch": 1.033049040511727,
"grad_norm": 0.8299580033525263,
"learning_rate": 3.862524999959157e-05,
"loss": 0.45,
"step": 242
},
{
"epoch": 1.037313432835821,
"grad_norm": 0.7637656058846947,
"learning_rate": 3.860342666498677e-05,
"loss": 0.4415,
"step": 243
},
{
"epoch": 1.0415778251599148,
"grad_norm": 0.6215398485067458,
"learning_rate": 3.85814377398019e-05,
"loss": 0.4079,
"step": 244
},
{
"epoch": 1.0458422174840085,
"grad_norm": 0.6490505807617634,
"learning_rate": 3.8559283419762134e-05,
"loss": 0.4176,
"step": 245
},
{
"epoch": 1.0501066098081024,
"grad_norm": 0.555231955081115,
"learning_rate": 3.853696390206484e-05,
"loss": 0.3918,
"step": 246
},
{
"epoch": 1.0543710021321961,
"grad_norm": 0.7216281242065635,
"learning_rate": 3.8514479385377813e-05,
"loss": 0.451,
"step": 247
},
{
"epoch": 1.05863539445629,
"grad_norm": 0.6113808147518047,
"learning_rate": 3.8491830069837526e-05,
"loss": 0.3889,
"step": 248
},
{
"epoch": 1.0628997867803838,
"grad_norm": 0.7048840459286184,
"learning_rate": 3.846901615704734e-05,
"loss": 0.4507,
"step": 249
},
{
"epoch": 1.0671641791044777,
"grad_norm": 0.8160850539508353,
"learning_rate": 3.84460378500757e-05,
"loss": 0.4542,
"step": 250
},
{
"epoch": 1.0714285714285714,
"grad_norm": 0.7961547389944886,
"learning_rate": 3.842289535345435e-05,
"loss": 0.4084,
"step": 251
},
{
"epoch": 1.0756929637526653,
"grad_norm": 0.6472721075881466,
"learning_rate": 3.839958887317649e-05,
"loss": 0.4252,
"step": 252
},
{
"epoch": 1.079957356076759,
"grad_norm": 0.7571976997791344,
"learning_rate": 3.837611861669495e-05,
"loss": 0.4391,
"step": 253
},
{
"epoch": 1.084221748400853,
"grad_norm": 0.6417985486555089,
"learning_rate": 3.835248479292037e-05,
"loss": 0.4298,
"step": 254
},
{
"epoch": 1.0884861407249466,
"grad_norm": 0.5842886170539788,
"learning_rate": 3.832868761221926e-05,
"loss": 0.4299,
"step": 255
},
{
"epoch": 1.0927505330490406,
"grad_norm": 0.6130005274798123,
"learning_rate": 3.830472728641225e-05,
"loss": 0.4306,
"step": 256
},
{
"epoch": 1.0970149253731343,
"grad_norm": 0.6292597107386093,
"learning_rate": 3.828060402877209e-05,
"loss": 0.4227,
"step": 257
},
{
"epoch": 1.1012793176972282,
"grad_norm": 0.742136846771457,
"learning_rate": 3.825631805402182e-05,
"loss": 0.4385,
"step": 258
},
{
"epoch": 1.105543710021322,
"grad_norm": 0.5582311466247738,
"learning_rate": 3.823186957833282e-05,
"loss": 0.4066,
"step": 259
},
{
"epoch": 1.1098081023454158,
"grad_norm": 0.7107736966260204,
"learning_rate": 3.8207258819322936e-05,
"loss": 0.4485,
"step": 260
},
{
"epoch": 1.1140724946695095,
"grad_norm": 0.5933308768453202,
"learning_rate": 3.818248599605448e-05,
"loss": 0.3744,
"step": 261
},
{
"epoch": 1.1183368869936035,
"grad_norm": 0.664231277660076,
"learning_rate": 3.8157551329032315e-05,
"loss": 0.4399,
"step": 262
},
{
"epoch": 1.1226012793176972,
"grad_norm": 0.5756320234685443,
"learning_rate": 3.81324550402019e-05,
"loss": 0.3954,
"step": 263
},
{
"epoch": 1.126865671641791,
"grad_norm": 0.5450744362052974,
"learning_rate": 3.810719735294731e-05,
"loss": 0.4005,
"step": 264
},
{
"epoch": 1.1311300639658848,
"grad_norm": 0.6811551113220277,
"learning_rate": 3.808177849208922e-05,
"loss": 0.4376,
"step": 265
},
{
"epoch": 1.1353944562899787,
"grad_norm": 0.7297298114596543,
"learning_rate": 3.8056198683882914e-05,
"loss": 0.4441,
"step": 266
},
{
"epoch": 1.1396588486140724,
"grad_norm": 0.6654168989431408,
"learning_rate": 3.8030458156016326e-05,
"loss": 0.4166,
"step": 267
},
{
"epoch": 1.1439232409381663,
"grad_norm": 0.590097586325753,
"learning_rate": 3.800455713760792e-05,
"loss": 0.4459,
"step": 268
},
{
"epoch": 1.14818763326226,
"grad_norm": 0.6520050326986123,
"learning_rate": 3.797849585920474e-05,
"loss": 0.3812,
"step": 269
},
{
"epoch": 1.152452025586354,
"grad_norm": 0.5487525639612298,
"learning_rate": 3.795227455278029e-05,
"loss": 0.4349,
"step": 270
},
{
"epoch": 1.1567164179104479,
"grad_norm": 0.8148597696250168,
"learning_rate": 3.792589345173249e-05,
"loss": 0.4159,
"step": 271
},
{
"epoch": 1.1609808102345416,
"grad_norm": 0.5234208183844118,
"learning_rate": 3.7899352790881636e-05,
"loss": 0.4103,
"step": 272
},
{
"epoch": 1.1652452025586353,
"grad_norm": 0.7783194060987326,
"learning_rate": 3.787265280646825e-05,
"loss": 0.4337,
"step": 273
},
{
"epoch": 1.1695095948827292,
"grad_norm": 0.6077482932406173,
"learning_rate": 3.784579373615102e-05,
"loss": 0.4448,
"step": 274
},
{
"epoch": 1.1737739872068231,
"grad_norm": 0.713559723726581,
"learning_rate": 3.781877581900467e-05,
"loss": 0.3954,
"step": 275
},
{
"epoch": 1.1780383795309168,
"grad_norm": 0.6706429841933795,
"learning_rate": 3.7791599295517825e-05,
"loss": 0.4363,
"step": 276
},
{
"epoch": 1.1823027718550105,
"grad_norm": 0.7750344644644377,
"learning_rate": 3.776426440759088e-05,
"loss": 0.435,
"step": 277
},
{
"epoch": 1.1865671641791045,
"grad_norm": 0.6485505143173109,
"learning_rate": 3.7736771398533855e-05,
"loss": 0.3992,
"step": 278
},
{
"epoch": 1.1908315565031984,
"grad_norm": 0.6278621684062865,
"learning_rate": 3.7709120513064196e-05,
"loss": 0.4532,
"step": 279
},
{
"epoch": 1.195095948827292,
"grad_norm": 0.4993768641894848,
"learning_rate": 3.768131199730465e-05,
"loss": 0.4289,
"step": 280
},
{
"epoch": 1.199360341151386,
"grad_norm": 0.5680603593838152,
"learning_rate": 3.7653346098781016e-05,
"loss": 0.4178,
"step": 281
},
{
"epoch": 1.2036247334754797,
"grad_norm": 0.6166978263820861,
"learning_rate": 3.762522306641998e-05,
"loss": 0.4082,
"step": 282
},
{
"epoch": 1.2078891257995736,
"grad_norm": 0.49268121002441434,
"learning_rate": 3.759694315054689e-05,
"loss": 0.4053,
"step": 283
},
{
"epoch": 1.2121535181236673,
"grad_norm": 0.5952406524302505,
"learning_rate": 3.7568506602883535e-05,
"loss": 0.4392,
"step": 284
},
{
"epoch": 1.2164179104477613,
"grad_norm": 0.7640007391300089,
"learning_rate": 3.7539913676545874e-05,
"loss": 0.4142,
"step": 285
},
{
"epoch": 1.220682302771855,
"grad_norm": 0.566641974103758,
"learning_rate": 3.7511164626041823e-05,
"loss": 0.4077,
"step": 286
},
{
"epoch": 1.224946695095949,
"grad_norm": 0.6259927094471025,
"learning_rate": 3.748225970726897e-05,
"loss": 0.4067,
"step": 287
},
{
"epoch": 1.2292110874200426,
"grad_norm": 0.5599340338022601,
"learning_rate": 3.745319917751229e-05,
"loss": 0.4483,
"step": 288
},
{
"epoch": 1.2334754797441365,
"grad_norm": 0.5263970037589443,
"learning_rate": 3.742398329544187e-05,
"loss": 0.4036,
"step": 289
},
{
"epoch": 1.2377398720682302,
"grad_norm": 0.5983772515182831,
"learning_rate": 3.7394612321110606e-05,
"loss": 0.4485,
"step": 290
},
{
"epoch": 1.2420042643923241,
"grad_norm": 0.7056768634699175,
"learning_rate": 3.736508651595188e-05,
"loss": 0.4423,
"step": 291
},
{
"epoch": 1.2462686567164178,
"grad_norm": 0.6538542504257794,
"learning_rate": 3.733540614277721e-05,
"loss": 0.3845,
"step": 292
},
{
"epoch": 1.2505330490405118,
"grad_norm": 0.5352314141573133,
"learning_rate": 3.7305571465773976e-05,
"loss": 0.455,
"step": 293
},
{
"epoch": 1.2547974413646055,
"grad_norm": 0.8442015630816173,
"learning_rate": 3.727558275050301e-05,
"loss": 0.4175,
"step": 294
},
{
"epoch": 1.2590618336886994,
"grad_norm": 0.7334448723876601,
"learning_rate": 3.724544026389624e-05,
"loss": 0.4193,
"step": 295
},
{
"epoch": 1.263326226012793,
"grad_norm": 0.6837481469169735,
"learning_rate": 3.721514427425435e-05,
"loss": 0.4563,
"step": 296
},
{
"epoch": 1.267590618336887,
"grad_norm": 0.4843766573199469,
"learning_rate": 3.718469505124434e-05,
"loss": 0.393,
"step": 297
},
{
"epoch": 1.271855010660981,
"grad_norm": 0.5246589482230002,
"learning_rate": 3.715409286589717e-05,
"loss": 0.4223,
"step": 298
},
{
"epoch": 1.2761194029850746,
"grad_norm": 0.502659816866133,
"learning_rate": 3.7123337990605335e-05,
"loss": 0.4147,
"step": 299
},
{
"epoch": 1.2803837953091683,
"grad_norm": 0.5247676176001221,
"learning_rate": 3.709243069912041e-05,
"loss": 0.4625,
"step": 300
},
{
"epoch": 1.2846481876332623,
"grad_norm": 0.45876509318226205,
"learning_rate": 3.706137126655066e-05,
"loss": 0.3995,
"step": 301
},
{
"epoch": 1.2889125799573562,
"grad_norm": 0.4627163601994038,
"learning_rate": 3.703015996935858e-05,
"loss": 0.4193,
"step": 302
},
{
"epoch": 1.29317697228145,
"grad_norm": 0.6686928675672329,
"learning_rate": 3.699879708535838e-05,
"loss": 0.4439,
"step": 303
},
{
"epoch": 1.2974413646055436,
"grad_norm": 0.5922109448050908,
"learning_rate": 3.6967282893713595e-05,
"loss": 0.4263,
"step": 304
},
{
"epoch": 1.3017057569296375,
"grad_norm": 0.5470627126862041,
"learning_rate": 3.6935617674934554e-05,
"loss": 0.4062,
"step": 305
},
{
"epoch": 1.3059701492537314,
"grad_norm": 0.6136095476120296,
"learning_rate": 3.69038017108759e-05,
"loss": 0.4738,
"step": 306
},
{
"epoch": 1.3102345415778252,
"grad_norm": 0.7612711898578125,
"learning_rate": 3.687183528473404e-05,
"loss": 0.4267,
"step": 307
},
{
"epoch": 1.3144989339019189,
"grad_norm": 0.5910092852049679,
"learning_rate": 3.683971868104468e-05,
"loss": 0.4401,
"step": 308
},
{
"epoch": 1.3187633262260128,
"grad_norm": 0.5005219840239118,
"learning_rate": 3.680745218568026e-05,
"loss": 0.4137,
"step": 309
},
{
"epoch": 1.3230277185501067,
"grad_norm": 0.7165297595567666,
"learning_rate": 3.677503608584743e-05,
"loss": 0.4336,
"step": 310
},
{
"epoch": 1.3272921108742004,
"grad_norm": 0.7862156021846395,
"learning_rate": 3.674247067008447e-05,
"loss": 0.4259,
"step": 311
},
{
"epoch": 1.331556503198294,
"grad_norm": 0.7808877468122921,
"learning_rate": 3.6709756228258735e-05,
"loss": 0.4782,
"step": 312
},
{
"epoch": 1.335820895522388,
"grad_norm": 0.6561500406466976,
"learning_rate": 3.667689305156406e-05,
"loss": 0.3878,
"step": 313
},
{
"epoch": 1.340085287846482,
"grad_norm": 0.5372106013768749,
"learning_rate": 3.664388143251822e-05,
"loss": 0.4225,
"step": 314
},
{
"epoch": 1.3443496801705757,
"grad_norm": 0.6924199058045664,
"learning_rate": 3.6610721664960236e-05,
"loss": 0.4364,
"step": 315
},
{
"epoch": 1.3486140724946696,
"grad_norm": 0.7015133870310188,
"learning_rate": 3.657741404404785e-05,
"loss": 0.4536,
"step": 316
},
{
"epoch": 1.3528784648187633,
"grad_norm": 0.693862690594011,
"learning_rate": 3.654395886625484e-05,
"loss": 0.4069,
"step": 317
},
{
"epoch": 1.3571428571428572,
"grad_norm": 0.6224295371607119,
"learning_rate": 3.65103564293684e-05,
"loss": 0.4283,
"step": 318
},
{
"epoch": 1.361407249466951,
"grad_norm": 0.6738546833298403,
"learning_rate": 3.647660703248651e-05,
"loss": 0.4368,
"step": 319
},
{
"epoch": 1.3656716417910448,
"grad_norm": 0.5857243004349567,
"learning_rate": 3.644271097601521e-05,
"loss": 0.4281,
"step": 320
},
{
"epoch": 1.3699360341151385,
"grad_norm": 0.5585286307033677,
"learning_rate": 3.640866856166601e-05,
"loss": 0.3689,
"step": 321
},
{
"epoch": 1.3742004264392325,
"grad_norm": 0.6876061087292902,
"learning_rate": 3.637448009245315e-05,
"loss": 0.4533,
"step": 322
},
{
"epoch": 1.3784648187633262,
"grad_norm": 0.6596849434007818,
"learning_rate": 3.63401458726909e-05,
"loss": 0.4347,
"step": 323
},
{
"epoch": 1.38272921108742,
"grad_norm": 0.6690336155185622,
"learning_rate": 3.6305666207990886e-05,
"loss": 0.4667,
"step": 324
},
{
"epoch": 1.3869936034115138,
"grad_norm": 0.6173155021258911,
"learning_rate": 3.6271041405259354e-05,
"loss": 0.4212,
"step": 325
},
{
"epoch": 1.3912579957356077,
"grad_norm": 0.632565166171892,
"learning_rate": 3.623627177269441e-05,
"loss": 0.4105,
"step": 326
},
{
"epoch": 1.3955223880597014,
"grad_norm": 0.5784228743084681,
"learning_rate": 3.6201357619783336e-05,
"loss": 0.4114,
"step": 327
},
{
"epoch": 1.3997867803837953,
"grad_norm": 0.5604209350319785,
"learning_rate": 3.616629925729977e-05,
"loss": 0.4415,
"step": 328
},
{
"epoch": 1.4040511727078893,
"grad_norm": 0.6626347666514213,
"learning_rate": 3.613109699730099e-05,
"loss": 0.4459,
"step": 329
},
{
"epoch": 1.408315565031983,
"grad_norm": 0.496782761321269,
"learning_rate": 3.609575115312511e-05,
"loss": 0.4213,
"step": 330
},
{
"epoch": 1.4125799573560767,
"grad_norm": 0.6361067708225957,
"learning_rate": 3.60602620393883e-05,
"loss": 0.3538,
"step": 331
},
{
"epoch": 1.4168443496801706,
"grad_norm": 0.6654635023457927,
"learning_rate": 3.602462997198198e-05,
"loss": 0.4233,
"step": 332
},
{
"epoch": 1.4211087420042645,
"grad_norm": 0.6225178321156866,
"learning_rate": 3.598885526807003e-05,
"loss": 0.4167,
"step": 333
},
{
"epoch": 1.4253731343283582,
"grad_norm": 0.6043453020450956,
"learning_rate": 3.595293824608591e-05,
"loss": 0.4121,
"step": 334
},
{
"epoch": 1.429637526652452,
"grad_norm": 0.5986851241982426,
"learning_rate": 3.591687922572991e-05,
"loss": 0.4422,
"step": 335
},
{
"epoch": 1.4339019189765458,
"grad_norm": 0.5894944313822743,
"learning_rate": 3.5880678527966224e-05,
"loss": 0.4221,
"step": 336
},
{
"epoch": 1.4381663113006398,
"grad_norm": 0.49054383198031515,
"learning_rate": 3.584433647502015e-05,
"loss": 0.3841,
"step": 337
},
{
"epoch": 1.4424307036247335,
"grad_norm": 0.5274462614351788,
"learning_rate": 3.580785339037519e-05,
"loss": 0.4065,
"step": 338
},
{
"epoch": 1.4466950959488272,
"grad_norm": 0.5278549805650488,
"learning_rate": 3.577122959877017e-05,
"loss": 0.4572,
"step": 339
},
{
"epoch": 1.450959488272921,
"grad_norm": 0.6317113335812257,
"learning_rate": 3.57344654261964e-05,
"loss": 0.4191,
"step": 340
},
{
"epoch": 1.455223880597015,
"grad_norm": 0.5600572445916931,
"learning_rate": 3.569756119989467e-05,
"loss": 0.399,
"step": 341
},
{
"epoch": 1.4594882729211087,
"grad_norm": 0.594198415593923,
"learning_rate": 3.566051724835245e-05,
"loss": 0.4455,
"step": 342
},
{
"epoch": 1.4637526652452024,
"grad_norm": 0.564190038407502,
"learning_rate": 3.562333390130089e-05,
"loss": 0.3932,
"step": 343
},
{
"epoch": 1.4680170575692963,
"grad_norm": 0.5954898940274361,
"learning_rate": 3.5586011489711934e-05,
"loss": 0.45,
"step": 344
},
{
"epoch": 1.4722814498933903,
"grad_norm": 0.48911744248144157,
"learning_rate": 3.554855034579532e-05,
"loss": 0.3832,
"step": 345
},
{
"epoch": 1.476545842217484,
"grad_norm": 0.6441473712847673,
"learning_rate": 3.551095080299568e-05,
"loss": 0.4369,
"step": 346
},
{
"epoch": 1.480810234541578,
"grad_norm": 0.6183527348725328,
"learning_rate": 3.5473213195989526e-05,
"loss": 0.4255,
"step": 347
},
{
"epoch": 1.4850746268656716,
"grad_norm": 0.6003497153907605,
"learning_rate": 3.5435337860682304e-05,
"loss": 0.4118,
"step": 348
},
{
"epoch": 1.4893390191897655,
"grad_norm": 0.7004108419888099,
"learning_rate": 3.539732513420538e-05,
"loss": 0.4425,
"step": 349
},
{
"epoch": 1.4936034115138592,
"grad_norm": 0.6873870156526409,
"learning_rate": 3.535917535491306e-05,
"loss": 0.4296,
"step": 350
},
{
"epoch": 1.4978678038379531,
"grad_norm": 0.5528330417053992,
"learning_rate": 3.532088886237956e-05,
"loss": 0.4146,
"step": 351
},
{
"epoch": 1.502132196162047,
"grad_norm": 0.6819580069714934,
"learning_rate": 3.5282465997396e-05,
"loss": 0.4364,
"step": 352
},
{
"epoch": 1.5063965884861408,
"grad_norm": 0.7074884540484468,
"learning_rate": 3.524390710196735e-05,
"loss": 0.4194,
"step": 353
},
{
"epoch": 1.5106609808102345,
"grad_norm": 0.5952225677619433,
"learning_rate": 3.520521251930941e-05,
"loss": 0.3989,
"step": 354
},
{
"epoch": 1.5149253731343284,
"grad_norm": 0.6477164505835896,
"learning_rate": 3.516638259384573e-05,
"loss": 0.4493,
"step": 355
},
{
"epoch": 1.5191897654584223,
"grad_norm": 0.6137858732201392,
"learning_rate": 3.512741767120457e-05,
"loss": 0.4015,
"step": 356
},
{
"epoch": 1.523454157782516,
"grad_norm": 0.5341242976683102,
"learning_rate": 3.5088318098215805e-05,
"loss": 0.4403,
"step": 357
},
{
"epoch": 1.5277185501066097,
"grad_norm": 0.5910346237420299,
"learning_rate": 3.5049084222907846e-05,
"loss": 0.4203,
"step": 358
},
{
"epoch": 1.5319829424307037,
"grad_norm": 0.6750588762077021,
"learning_rate": 3.5009716394504526e-05,
"loss": 0.4366,
"step": 359
},
{
"epoch": 1.5362473347547976,
"grad_norm": 0.6282869693485486,
"learning_rate": 3.497021496342203e-05,
"loss": 0.4029,
"step": 360
},
{
"epoch": 1.5405117270788913,
"grad_norm": 0.4327732025523846,
"learning_rate": 3.493058028126572e-05,
"loss": 0.3786,
"step": 361
},
{
"epoch": 1.544776119402985,
"grad_norm": 0.5628905924554551,
"learning_rate": 3.4890812700827065e-05,
"loss": 0.442,
"step": 362
},
{
"epoch": 1.549040511727079,
"grad_norm": 0.7672190095876459,
"learning_rate": 3.485091257608047e-05,
"loss": 0.4599,
"step": 363
},
{
"epoch": 1.5533049040511728,
"grad_norm": 0.4315110702270802,
"learning_rate": 3.48108802621801e-05,
"loss": 0.4105,
"step": 364
},
{
"epoch": 1.5575692963752665,
"grad_norm": 0.699119311503924,
"learning_rate": 3.477071611545676e-05,
"loss": 0.4625,
"step": 365
},
{
"epoch": 1.5618336886993602,
"grad_norm": 0.62580213083298,
"learning_rate": 3.473042049341474e-05,
"loss": 0.3939,
"step": 366
},
{
"epoch": 1.5660980810234542,
"grad_norm": 0.41085317867894716,
"learning_rate": 3.4689993754728564e-05,
"loss": 0.4227,
"step": 367
},
{
"epoch": 1.570362473347548,
"grad_norm": 0.8583066478775195,
"learning_rate": 3.464943625923984e-05,
"loss": 0.4174,
"step": 368
},
{
"epoch": 1.5746268656716418,
"grad_norm": 0.7483607145625111,
"learning_rate": 3.4608748367954064e-05,
"loss": 0.4246,
"step": 369
},
{
"epoch": 1.5788912579957355,
"grad_norm": 0.4588313921101154,
"learning_rate": 3.45679304430374e-05,
"loss": 0.44,
"step": 370
},
{
"epoch": 1.5831556503198294,
"grad_norm": 0.8463537495188219,
"learning_rate": 3.452698284781343e-05,
"loss": 0.4707,
"step": 371
},
{
"epoch": 1.5874200426439233,
"grad_norm": 0.6714451469463699,
"learning_rate": 3.4485905946759965e-05,
"loss": 0.4293,
"step": 372
},
{
"epoch": 1.591684434968017,
"grad_norm": 0.49084942527038844,
"learning_rate": 3.4444700105505766e-05,
"loss": 0.387,
"step": 373
},
{
"epoch": 1.5959488272921107,
"grad_norm": 0.7339552481136006,
"learning_rate": 3.44033656908273e-05,
"loss": 0.436,
"step": 374
},
{
"epoch": 1.6002132196162047,
"grad_norm": 0.9668658455117516,
"learning_rate": 3.4361903070645484e-05,
"loss": 0.446,
"step": 375
},
{
"epoch": 1.6044776119402986,
"grad_norm": 0.6437283133723504,
"learning_rate": 3.432031261402239e-05,
"loss": 0.4056,
"step": 376
},
{
"epoch": 1.6087420042643923,
"grad_norm": 0.5471114373733622,
"learning_rate": 3.4278594691157985e-05,
"loss": 0.3993,
"step": 377
},
{
"epoch": 1.613006396588486,
"grad_norm": 0.8359743208014425,
"learning_rate": 3.423674967338681e-05,
"loss": 0.4337,
"step": 378
},
{
"epoch": 1.61727078891258,
"grad_norm": 0.6077166341043039,
"learning_rate": 3.419477793317471e-05,
"loss": 0.4377,
"step": 379
},
{
"epoch": 1.6215351812366738,
"grad_norm": 0.4846179838526592,
"learning_rate": 3.415267984411547e-05,
"loss": 0.381,
"step": 380
},
{
"epoch": 1.6257995735607675,
"grad_norm": 0.5563839923571956,
"learning_rate": 3.411045578092754e-05,
"loss": 0.452,
"step": 381
},
{
"epoch": 1.6300639658848612,
"grad_norm": 0.5298650018801719,
"learning_rate": 3.406810611945065e-05,
"loss": 0.4023,
"step": 382
},
{
"epoch": 1.6343283582089554,
"grad_norm": 0.47036826245422314,
"learning_rate": 3.402563123664251e-05,
"loss": 0.43,
"step": 383
},
{
"epoch": 1.638592750533049,
"grad_norm": 0.5918256778605155,
"learning_rate": 3.398303151057543e-05,
"loss": 0.414,
"step": 384
},
{
"epoch": 1.6428571428571428,
"grad_norm": 0.4724509860010177,
"learning_rate": 3.394030732043296e-05,
"loss": 0.4261,
"step": 385
},
{
"epoch": 1.6471215351812367,
"grad_norm": 0.6438266470974083,
"learning_rate": 3.389745904650652e-05,
"loss": 0.4461,
"step": 386
},
{
"epoch": 1.6513859275053306,
"grad_norm": 0.5814714611362957,
"learning_rate": 3.385448707019199e-05,
"loss": 0.4159,
"step": 387
},
{
"epoch": 1.6556503198294243,
"grad_norm": 0.4206338335496547,
"learning_rate": 3.381139177398636e-05,
"loss": 0.4136,
"step": 388
},
{
"epoch": 1.659914712153518,
"grad_norm": 0.653347902879806,
"learning_rate": 3.3768173541484276e-05,
"loss": 0.4152,
"step": 389
},
{
"epoch": 1.664179104477612,
"grad_norm": 0.5717054823575618,
"learning_rate": 3.372483275737468e-05,
"loss": 0.4158,
"step": 390
},
{
"epoch": 1.668443496801706,
"grad_norm": 0.567865027735176,
"learning_rate": 3.3681369807437316e-05,
"loss": 0.4328,
"step": 391
},
{
"epoch": 1.6727078891257996,
"grad_norm": 0.5670521478372824,
"learning_rate": 3.363778507853938e-05,
"loss": 0.4398,
"step": 392
},
{
"epoch": 1.6769722814498933,
"grad_norm": 0.6187350261747493,
"learning_rate": 3.359407895863199e-05,
"loss": 0.445,
"step": 393
},
{
"epoch": 1.6812366737739872,
"grad_norm": 0.41177560006752595,
"learning_rate": 3.35502518367468e-05,
"loss": 0.4106,
"step": 394
},
{
"epoch": 1.6855010660980811,
"grad_norm": 0.7844489084961632,
"learning_rate": 3.350630410299252e-05,
"loss": 0.416,
"step": 395
},
{
"epoch": 1.6897654584221748,
"grad_norm": 0.5969945265152646,
"learning_rate": 3.34622361485514e-05,
"loss": 0.4537,
"step": 396
},
{
"epoch": 1.6940298507462686,
"grad_norm": 0.6009377011808636,
"learning_rate": 3.3418048365675835e-05,
"loss": 0.3976,
"step": 397
},
{
"epoch": 1.6982942430703625,
"grad_norm": 0.7970282912858717,
"learning_rate": 3.3373741147684776e-05,
"loss": 0.3958,
"step": 398
},
{
"epoch": 1.7025586353944564,
"grad_norm": 0.51714913947718,
"learning_rate": 3.332931488896029e-05,
"loss": 0.4424,
"step": 399
},
{
"epoch": 1.70682302771855,
"grad_norm": 0.6571324014445984,
"learning_rate": 3.328476998494405e-05,
"loss": 0.4293,
"step": 400
},
{
"epoch": 1.7110874200426438,
"grad_norm": 0.6733750404894205,
"learning_rate": 3.3240106832133784e-05,
"loss": 0.4012,
"step": 401
},
{
"epoch": 1.7153518123667377,
"grad_norm": 0.591500333207376,
"learning_rate": 3.319532582807977e-05,
"loss": 0.4224,
"step": 402
},
{
"epoch": 1.7196162046908317,
"grad_norm": 0.6174698680618833,
"learning_rate": 3.315042737138128e-05,
"loss": 0.4114,
"step": 403
},
{
"epoch": 1.7238805970149254,
"grad_norm": 0.5557934635630427,
"learning_rate": 3.3105411861683046e-05,
"loss": 0.4227,
"step": 404
},
{
"epoch": 1.728144989339019,
"grad_norm": 0.5300688328065255,
"learning_rate": 3.30602796996717e-05,
"loss": 0.3967,
"step": 405
},
{
"epoch": 1.732409381663113,
"grad_norm": 0.5760369736255456,
"learning_rate": 3.3015031287072225e-05,
"loss": 0.4386,
"step": 406
},
{
"epoch": 1.736673773987207,
"grad_norm": 0.4555624191417979,
"learning_rate": 3.296966702664431e-05,
"loss": 0.4274,
"step": 407
},
{
"epoch": 1.7409381663113006,
"grad_norm": 0.5059665427249744,
"learning_rate": 3.2924187322178865e-05,
"loss": 0.4145,
"step": 408
},
{
"epoch": 1.7452025586353943,
"grad_norm": 0.46756504233420204,
"learning_rate": 3.287859257849434e-05,
"loss": 0.4309,
"step": 409
},
{
"epoch": 1.7494669509594882,
"grad_norm": 0.46285792608796633,
"learning_rate": 3.2832883201433185e-05,
"loss": 0.3712,
"step": 410
},
{
"epoch": 1.7537313432835822,
"grad_norm": 0.6245072491599375,
"learning_rate": 3.278705959785821e-05,
"loss": 0.4762,
"step": 411
},
{
"epoch": 1.7579957356076759,
"grad_norm": 0.6061486243901444,
"learning_rate": 3.274112217564892e-05,
"loss": 0.4067,
"step": 412
},
{
"epoch": 1.7622601279317696,
"grad_norm": 0.5820768104988486,
"learning_rate": 3.2695071343697996e-05,
"loss": 0.4124,
"step": 413
},
{
"epoch": 1.7665245202558635,
"grad_norm": 0.6819975740897306,
"learning_rate": 3.2648907511907544e-05,
"loss": 0.4446,
"step": 414
},
{
"epoch": 1.7707889125799574,
"grad_norm": 0.5715841364919834,
"learning_rate": 3.2602631091185506e-05,
"loss": 0.4548,
"step": 415
},
{
"epoch": 1.775053304904051,
"grad_norm": 0.5021404496551074,
"learning_rate": 3.255624249344198e-05,
"loss": 0.415,
"step": 416
},
{
"epoch": 1.779317697228145,
"grad_norm": 0.6236286206409429,
"learning_rate": 3.250974213158555e-05,
"loss": 0.418,
"step": 417
},
{
"epoch": 1.783582089552239,
"grad_norm": 0.5082012644538463,
"learning_rate": 3.246313041951966e-05,
"loss": 0.4172,
"step": 418
},
{
"epoch": 1.7878464818763327,
"grad_norm": 0.5804182502381373,
"learning_rate": 3.241640777213884e-05,
"loss": 0.4134,
"step": 419
},
{
"epoch": 1.7921108742004264,
"grad_norm": 0.5841814227406251,
"learning_rate": 3.23695746053251e-05,
"loss": 0.3799,
"step": 420
},
{
"epoch": 1.7963752665245203,
"grad_norm": 0.561217410497316,
"learning_rate": 3.232263133594419e-05,
"loss": 0.4553,
"step": 421
},
{
"epoch": 1.8006396588486142,
"grad_norm": 0.5791964104930211,
"learning_rate": 3.2275578381841876e-05,
"loss": 0.4318,
"step": 422
},
{
"epoch": 1.804904051172708,
"grad_norm": 0.5015381227163118,
"learning_rate": 3.222841616184025e-05,
"loss": 0.4287,
"step": 423
},
{
"epoch": 1.8091684434968016,
"grad_norm": 0.4931963971340661,
"learning_rate": 3.218114509573397e-05,
"loss": 0.4177,
"step": 424
},
{
"epoch": 1.8134328358208955,
"grad_norm": 0.4139642925732555,
"learning_rate": 3.213376560428656e-05,
"loss": 0.3862,
"step": 425
},
{
"epoch": 1.8176972281449895,
"grad_norm": 0.6053235803447229,
"learning_rate": 3.208627810922665e-05,
"loss": 0.4278,
"step": 426
},
{
"epoch": 1.8219616204690832,
"grad_norm": 0.4928980958456602,
"learning_rate": 3.20386830332442e-05,
"loss": 0.4042,
"step": 427
},
{
"epoch": 1.8262260127931769,
"grad_norm": 0.5447428327419589,
"learning_rate": 3.199098079998676e-05,
"loss": 0.4338,
"step": 428
},
{
"epoch": 1.8304904051172708,
"grad_norm": 0.630636350668545,
"learning_rate": 3.194317183405573e-05,
"loss": 0.407,
"step": 429
},
{
"epoch": 1.8347547974413647,
"grad_norm": 0.4388934662903153,
"learning_rate": 3.189525656100248e-05,
"loss": 0.4002,
"step": 430
},
{
"epoch": 1.8390191897654584,
"grad_norm": 0.5477299121670924,
"learning_rate": 3.184723540732469e-05,
"loss": 0.3893,
"step": 431
},
{
"epoch": 1.8432835820895521,
"grad_norm": 0.5001645364202892,
"learning_rate": 3.1799108800462466e-05,
"loss": 0.4561,
"step": 432
},
{
"epoch": 1.847547974413646,
"grad_norm": 0.56411219365906,
"learning_rate": 3.175087716879454e-05,
"loss": 0.4391,
"step": 433
},
{
"epoch": 1.85181236673774,
"grad_norm": 0.49624337673620295,
"learning_rate": 3.170254094163454e-05,
"loss": 0.4447,
"step": 434
},
{
"epoch": 1.8560767590618337,
"grad_norm": 0.48073525858910976,
"learning_rate": 3.1654100549227024e-05,
"loss": 0.4207,
"step": 435
},
{
"epoch": 1.8603411513859274,
"grad_norm": 0.42948274359118516,
"learning_rate": 3.160555642274382e-05,
"loss": 0.3848,
"step": 436
},
{
"epoch": 1.8646055437100213,
"grad_norm": 0.5376792952401593,
"learning_rate": 3.155690899428002e-05,
"loss": 0.4532,
"step": 437
},
{
"epoch": 1.8688699360341152,
"grad_norm": 0.41636533485267063,
"learning_rate": 3.1508158696850275e-05,
"loss": 0.3942,
"step": 438
},
{
"epoch": 1.873134328358209,
"grad_norm": 0.5319610981633921,
"learning_rate": 3.1459305964384855e-05,
"loss": 0.4106,
"step": 439
},
{
"epoch": 1.8773987206823026,
"grad_norm": 0.448232868334879,
"learning_rate": 3.141035123172581e-05,
"loss": 0.4116,
"step": 440
},
{
"epoch": 1.8816631130063965,
"grad_norm": 0.5880866733448852,
"learning_rate": 3.136129493462312e-05,
"loss": 0.4367,
"step": 441
},
{
"epoch": 1.8859275053304905,
"grad_norm": 0.4930255466134952,
"learning_rate": 3.1312137509730776e-05,
"loss": 0.4176,
"step": 442
},
{
"epoch": 1.8901918976545842,
"grad_norm": 0.5224716593111842,
"learning_rate": 3.126287939460293e-05,
"loss": 0.4358,
"step": 443
},
{
"epoch": 1.8944562899786779,
"grad_norm": 0.5031275852344921,
"learning_rate": 3.121352102768998e-05,
"loss": 0.431,
"step": 444
},
{
"epoch": 1.8987206823027718,
"grad_norm": 0.4599363811170101,
"learning_rate": 3.116406284833468e-05,
"loss": 0.3975,
"step": 445
},
{
"epoch": 1.9029850746268657,
"grad_norm": 0.5204739120479832,
"learning_rate": 3.111450529676821e-05,
"loss": 0.4095,
"step": 446
},
{
"epoch": 1.9072494669509594,
"grad_norm": 0.41426066836339737,
"learning_rate": 3.106484881410628e-05,
"loss": 0.4312,
"step": 447
},
{
"epoch": 1.9115138592750534,
"grad_norm": 0.5350188027407833,
"learning_rate": 3.101509384234519e-05,
"loss": 0.3846,
"step": 448
},
{
"epoch": 1.9157782515991473,
"grad_norm": 0.5541430552086132,
"learning_rate": 3.09652408243579e-05,
"loss": 0.4402,
"step": 449
},
{
"epoch": 1.920042643923241,
"grad_norm": 0.45616595483342576,
"learning_rate": 3.091529020389009e-05,
"loss": 0.3975,
"step": 450
},
{
"epoch": 1.9243070362473347,
"grad_norm": 0.47195887268241116,
"learning_rate": 3.086524242555621e-05,
"loss": 0.4186,
"step": 451
},
{
"epoch": 1.9285714285714286,
"grad_norm": 0.417937083067313,
"learning_rate": 3.081509793483551e-05,
"loss": 0.3991,
"step": 452
},
{
"epoch": 1.9328358208955225,
"grad_norm": 0.5147323724222759,
"learning_rate": 3.076485717806808e-05,
"loss": 0.4352,
"step": 453
},
{
"epoch": 1.9371002132196162,
"grad_norm": 0.4721363439626508,
"learning_rate": 3.0714520602450896e-05,
"loss": 0.4399,
"step": 454
},
{
"epoch": 1.94136460554371,
"grad_norm": 0.5204879106863163,
"learning_rate": 3.066408865603383e-05,
"loss": 0.4044,
"step": 455
},
{
"epoch": 1.9456289978678039,
"grad_norm": 0.47869603996362364,
"learning_rate": 3.061356178771564e-05,
"loss": 0.4344,
"step": 456
},
{
"epoch": 1.9498933901918978,
"grad_norm": 0.5487456033934804,
"learning_rate": 3.056294044723999e-05,
"loss": 0.3805,
"step": 457
},
{
"epoch": 1.9541577825159915,
"grad_norm": 0.517435524986642,
"learning_rate": 3.051222508519148e-05,
"loss": 0.4757,
"step": 458
},
{
"epoch": 1.9584221748400852,
"grad_norm": 0.5445809033478819,
"learning_rate": 3.0461416152991555e-05,
"loss": 0.3884,
"step": 459
},
{
"epoch": 1.962686567164179,
"grad_norm": 0.5736641325670427,
"learning_rate": 3.0410514102894582e-05,
"loss": 0.4237,
"step": 460
},
{
"epoch": 1.966950959488273,
"grad_norm": 0.5181389483618426,
"learning_rate": 3.0359519387983738e-05,
"loss": 0.4501,
"step": 461
},
{
"epoch": 1.9712153518123667,
"grad_norm": 0.7055217266557069,
"learning_rate": 3.0308432462167045e-05,
"loss": 0.4302,
"step": 462
},
{
"epoch": 1.9754797441364604,
"grad_norm": 0.45222781902353903,
"learning_rate": 3.0257253780173293e-05,
"loss": 0.3897,
"step": 463
},
{
"epoch": 1.9797441364605544,
"grad_norm": 0.5399300307884194,
"learning_rate": 3.0205983797548e-05,
"loss": 0.4031,
"step": 464
},
{
"epoch": 1.9840085287846483,
"grad_norm": 0.48971933733689127,
"learning_rate": 3.015462297064936e-05,
"loss": 0.4133,
"step": 465
},
{
"epoch": 1.988272921108742,
"grad_norm": 0.44738322674726755,
"learning_rate": 3.010317175664416e-05,
"loss": 0.395,
"step": 466
},
{
"epoch": 1.9925373134328357,
"grad_norm": 0.5055000394229842,
"learning_rate": 3.0051630613503776e-05,
"loss": 0.4032,
"step": 467
},
{
"epoch": 1.9968017057569296,
"grad_norm": 0.4866897113581119,
"learning_rate": 3.0000000000000004e-05,
"loss": 0.4457,
"step": 468
},
{
"epoch": 2.002132196162047,
"grad_norm": 1.003557894755282,
"learning_rate": 2.994828037570104e-05,
"loss": 0.7268,
"step": 469
},
{
"epoch": 2.0063965884861408,
"grad_norm": 0.8084026414756201,
"learning_rate": 2.9896472200967377e-05,
"loss": 0.3024,
"step": 470
},
{
"epoch": 2.0106609808102345,
"grad_norm": 0.9696863524057489,
"learning_rate": 2.98445759369477e-05,
"loss": 0.3435,
"step": 471
},
{
"epoch": 2.014925373134328,
"grad_norm": 1.0289459178017686,
"learning_rate": 2.9792592045574772e-05,
"loss": 0.3344,
"step": 472
},
{
"epoch": 2.0191897654584223,
"grad_norm": 0.6422882221374078,
"learning_rate": 2.9740520989561357e-05,
"loss": 0.3192,
"step": 473
},
{
"epoch": 2.023454157782516,
"grad_norm": 0.7686763465029794,
"learning_rate": 2.9688363232396056e-05,
"loss": 0.3685,
"step": 474
},
{
"epoch": 2.0277185501066097,
"grad_norm": 0.7619507919081873,
"learning_rate": 2.9636119238339213e-05,
"loss": 0.3232,
"step": 475
},
{
"epoch": 2.0319829424307034,
"grad_norm": 0.7077722853700464,
"learning_rate": 2.958378947241878e-05,
"loss": 0.3433,
"step": 476
},
{
"epoch": 2.0362473347547976,
"grad_norm": 0.9555687435185575,
"learning_rate": 2.9531374400426158e-05,
"loss": 0.346,
"step": 477
},
{
"epoch": 2.0405117270788913,
"grad_norm": 0.7037304850265937,
"learning_rate": 2.9478874488912078e-05,
"loss": 0.3333,
"step": 478
},
{
"epoch": 2.044776119402985,
"grad_norm": 0.674207846993653,
"learning_rate": 2.9426290205182418e-05,
"loss": 0.3334,
"step": 479
},
{
"epoch": 2.0490405117270787,
"grad_norm": 0.7820884567891954,
"learning_rate": 2.9373622017294075e-05,
"loss": 0.3309,
"step": 480
},
{
"epoch": 2.053304904051173,
"grad_norm": 0.5305149128764548,
"learning_rate": 2.9320870394050783e-05,
"loss": 0.3287,
"step": 481
},
{
"epoch": 2.0575692963752665,
"grad_norm": 0.7079912872295963,
"learning_rate": 2.926803580499892e-05,
"loss": 0.3194,
"step": 482
},
{
"epoch": 2.0618336886993602,
"grad_norm": 0.5541579502858093,
"learning_rate": 2.9215118720423375e-05,
"loss": 0.3444,
"step": 483
},
{
"epoch": 2.066098081023454,
"grad_norm": 0.6513668190534649,
"learning_rate": 2.9162119611343324e-05,
"loss": 0.3327,
"step": 484
},
{
"epoch": 2.070362473347548,
"grad_norm": 0.5294612918314424,
"learning_rate": 2.910903894950805e-05,
"loss": 0.343,
"step": 485
},
{
"epoch": 2.074626865671642,
"grad_norm": 0.4752949935850779,
"learning_rate": 2.9055877207392752e-05,
"loss": 0.3062,
"step": 486
},
{
"epoch": 2.0788912579957355,
"grad_norm": 0.4781620758478508,
"learning_rate": 2.900263485819431e-05,
"loss": 0.3214,
"step": 487
},
{
"epoch": 2.0831556503198296,
"grad_norm": 0.548093488067997,
"learning_rate": 2.8949312375827116e-05,
"loss": 0.3296,
"step": 488
},
{
"epoch": 2.0874200426439233,
"grad_norm": 0.4938614632595294,
"learning_rate": 2.8895910234918828e-05,
"loss": 0.3309,
"step": 489
},
{
"epoch": 2.091684434968017,
"grad_norm": 0.5185353183120329,
"learning_rate": 2.8842428910806143e-05,
"loss": 0.3318,
"step": 490
},
{
"epoch": 2.0959488272921107,
"grad_norm": 0.5799498196416928,
"learning_rate": 2.878886887953059e-05,
"loss": 0.3296,
"step": 491
},
{
"epoch": 2.100213219616205,
"grad_norm": 0.4942313754302595,
"learning_rate": 2.873523061783426e-05,
"loss": 0.3175,
"step": 492
},
{
"epoch": 2.1044776119402986,
"grad_norm": 0.5236125945451315,
"learning_rate": 2.8681514603155575e-05,
"loss": 0.3013,
"step": 493
},
{
"epoch": 2.1087420042643923,
"grad_norm": 0.4518285850453316,
"learning_rate": 2.8627721313625073e-05,
"loss": 0.3316,
"step": 494
},
{
"epoch": 2.113006396588486,
"grad_norm": 0.5126760484367925,
"learning_rate": 2.8573851228061084e-05,
"loss": 0.3355,
"step": 495
},
{
"epoch": 2.11727078891258,
"grad_norm": 0.5029294170658799,
"learning_rate": 2.851990482596554e-05,
"loss": 0.3416,
"step": 496
},
{
"epoch": 2.121535181236674,
"grad_norm": 0.4288555624298432,
"learning_rate": 2.846588258751964e-05,
"loss": 0.3195,
"step": 497
},
{
"epoch": 2.1257995735607675,
"grad_norm": 0.4298499268015924,
"learning_rate": 2.8411784993579633e-05,
"loss": 0.3052,
"step": 498
},
{
"epoch": 2.1300639658848612,
"grad_norm": 0.4671221612093904,
"learning_rate": 2.8357612525672503e-05,
"loss": 0.3337,
"step": 499
},
{
"epoch": 2.1343283582089554,
"grad_norm": 0.5498904625760632,
"learning_rate": 2.830336566599169e-05,
"loss": 0.3416,
"step": 500
},
{
"epoch": 2.138592750533049,
"grad_norm": 0.40997801357104674,
"learning_rate": 2.8249044897392814e-05,
"loss": 0.3216,
"step": 501
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.5165614467304281,
"learning_rate": 2.819465070338935e-05,
"loss": 0.3189,
"step": 502
},
{
"epoch": 2.1471215351812365,
"grad_norm": 0.37915760405651994,
"learning_rate": 2.814018356814834e-05,
"loss": 0.324,
"step": 503
},
{
"epoch": 2.1513859275053306,
"grad_norm": 0.6558262452058821,
"learning_rate": 2.80856439764861e-05,
"loss": 0.3607,
"step": 504
},
{
"epoch": 2.1556503198294243,
"grad_norm": 0.4499344644834719,
"learning_rate": 2.8031032413863863e-05,
"loss": 0.3473,
"step": 505
},
{
"epoch": 2.159914712153518,
"grad_norm": 0.49760589957195217,
"learning_rate": 2.7976349366383487e-05,
"loss": 0.3408,
"step": 506
},
{
"epoch": 2.1641791044776117,
"grad_norm": 0.4523387967822899,
"learning_rate": 2.792159532078314e-05,
"loss": 0.3222,
"step": 507
},
{
"epoch": 2.168443496801706,
"grad_norm": 0.4724751053334817,
"learning_rate": 2.786677076443292e-05,
"loss": 0.3405,
"step": 508
},
{
"epoch": 2.1727078891257996,
"grad_norm": 0.4128778956844833,
"learning_rate": 2.7811876185330566e-05,
"loss": 0.3303,
"step": 509
},
{
"epoch": 2.1769722814498933,
"grad_norm": 0.4598923533907129,
"learning_rate": 2.77569120720971e-05,
"loss": 0.3263,
"step": 510
},
{
"epoch": 2.181236673773987,
"grad_norm": 0.4398641540643089,
"learning_rate": 2.770187891397246e-05,
"loss": 0.3357,
"step": 511
},
{
"epoch": 2.185501066098081,
"grad_norm": 0.39972559010107306,
"learning_rate": 2.764677720081116e-05,
"loss": 0.3167,
"step": 512
},
{
"epoch": 2.189765458422175,
"grad_norm": 0.39291427360339126,
"learning_rate": 2.7591607423077932e-05,
"loss": 0.2892,
"step": 513
},
{
"epoch": 2.1940298507462686,
"grad_norm": 0.43942382097226057,
"learning_rate": 2.7536370071843353e-05,
"loss": 0.3463,
"step": 514
},
{
"epoch": 2.1982942430703627,
"grad_norm": 0.455479906539244,
"learning_rate": 2.748106563877948e-05,
"loss": 0.3309,
"step": 515
},
{
"epoch": 2.2025586353944564,
"grad_norm": 0.47069509530231296,
"learning_rate": 2.7425694616155474e-05,
"loss": 0.3302,
"step": 516
},
{
"epoch": 2.20682302771855,
"grad_norm": 0.46486150824863315,
"learning_rate": 2.7370257496833206e-05,
"loss": 0.3247,
"step": 517
},
{
"epoch": 2.211087420042644,
"grad_norm": 0.52520003158657,
"learning_rate": 2.7314754774262885e-05,
"loss": 0.33,
"step": 518
},
{
"epoch": 2.2153518123667375,
"grad_norm": 0.4133516757830205,
"learning_rate": 2.7259186942478656e-05,
"loss": 0.3136,
"step": 519
},
{
"epoch": 2.2196162046908317,
"grad_norm": 0.4584557324610895,
"learning_rate": 2.720355449609421e-05,
"loss": 0.3336,
"step": 520
},
{
"epoch": 2.2238805970149254,
"grad_norm": 0.5088846176274742,
"learning_rate": 2.7147857930298377e-05,
"loss": 0.3503,
"step": 521
},
{
"epoch": 2.228144989339019,
"grad_norm": 0.4105166262617953,
"learning_rate": 2.7092097740850712e-05,
"loss": 0.372,
"step": 522
},
{
"epoch": 2.232409381663113,
"grad_norm": 0.48064341561246865,
"learning_rate": 2.7036274424077107e-05,
"loss": 0.3284,
"step": 523
},
{
"epoch": 2.236673773987207,
"grad_norm": 0.43023264705358255,
"learning_rate": 2.698038847686533e-05,
"loss": 0.3209,
"step": 524
},
{
"epoch": 2.2409381663113006,
"grad_norm": 0.41178473939160515,
"learning_rate": 2.692444039666066e-05,
"loss": 0.3064,
"step": 525
},
{
"epoch": 2.2452025586353943,
"grad_norm": 0.488463118984366,
"learning_rate": 2.6868430681461385e-05,
"loss": 0.3322,
"step": 526
},
{
"epoch": 2.2494669509594885,
"grad_norm": 0.4149213055208161,
"learning_rate": 2.681235982981444e-05,
"loss": 0.3559,
"step": 527
},
{
"epoch": 2.253731343283582,
"grad_norm": 0.4817511814973644,
"learning_rate": 2.6756228340810946e-05,
"loss": 0.3055,
"step": 528
},
{
"epoch": 2.257995735607676,
"grad_norm": 0.39070396332909846,
"learning_rate": 2.670003671408174e-05,
"loss": 0.3168,
"step": 529
},
{
"epoch": 2.2622601279317696,
"grad_norm": 0.5203601542790566,
"learning_rate": 2.6643785449792967e-05,
"loss": 0.3282,
"step": 530
},
{
"epoch": 2.2665245202558637,
"grad_norm": 0.41130909380747965,
"learning_rate": 2.6587475048641596e-05,
"loss": 0.3468,
"step": 531
},
{
"epoch": 2.2707889125799574,
"grad_norm": 0.4364319149963848,
"learning_rate": 2.6531106011850988e-05,
"loss": 0.2883,
"step": 532
},
{
"epoch": 2.275053304904051,
"grad_norm": 0.4314737959819764,
"learning_rate": 2.6474678841166426e-05,
"loss": 0.3049,
"step": 533
},
{
"epoch": 2.279317697228145,
"grad_norm": 0.558103408427805,
"learning_rate": 2.6418194038850634e-05,
"loss": 0.3682,
"step": 534
},
{
"epoch": 2.283582089552239,
"grad_norm": 0.4584428049233054,
"learning_rate": 2.6361652107679335e-05,
"loss": 0.3401,
"step": 535
},
{
"epoch": 2.2878464818763327,
"grad_norm": 0.3837451796840203,
"learning_rate": 2.630505355093676e-05,
"loss": 0.3038,
"step": 536
},
{
"epoch": 2.2921108742004264,
"grad_norm": 0.48972122254900113,
"learning_rate": 2.624839887241115e-05,
"loss": 0.3486,
"step": 537
},
{
"epoch": 2.29637526652452,
"grad_norm": 0.375527961031895,
"learning_rate": 2.619168857639032e-05,
"loss": 0.3316,
"step": 538
},
{
"epoch": 2.300639658848614,
"grad_norm": 0.45411810854625184,
"learning_rate": 2.6134923167657106e-05,
"loss": 0.3169,
"step": 539
},
{
"epoch": 2.304904051172708,
"grad_norm": 0.42531128407316904,
"learning_rate": 2.607810315148494e-05,
"loss": 0.346,
"step": 540
},
{
"epoch": 2.3091684434968016,
"grad_norm": 0.41992642350814313,
"learning_rate": 2.6021229033633303e-05,
"loss": 0.3089,
"step": 541
},
{
"epoch": 2.3134328358208958,
"grad_norm": 0.41898615604904854,
"learning_rate": 2.5964301320343227e-05,
"loss": 0.3595,
"step": 542
},
{
"epoch": 2.3176972281449895,
"grad_norm": 0.4177885547418394,
"learning_rate": 2.5907320518332827e-05,
"loss": 0.313,
"step": 543
},
{
"epoch": 2.321961620469083,
"grad_norm": 0.38229897718006295,
"learning_rate": 2.5850287134792747e-05,
"loss": 0.321,
"step": 544
},
{
"epoch": 2.326226012793177,
"grad_norm": 0.4041612241717893,
"learning_rate": 2.5793201677381675e-05,
"loss": 0.3418,
"step": 545
},
{
"epoch": 2.3304904051172706,
"grad_norm": 0.4274720630355263,
"learning_rate": 2.5736064654221808e-05,
"loss": 0.336,
"step": 546
},
{
"epoch": 2.3347547974413647,
"grad_norm": 0.3861871646004023,
"learning_rate": 2.5678876573894326e-05,
"loss": 0.3482,
"step": 547
},
{
"epoch": 2.3390191897654584,
"grad_norm": 0.4077636527683995,
"learning_rate": 2.5621637945434876e-05,
"loss": 0.3286,
"step": 548
},
{
"epoch": 2.343283582089552,
"grad_norm": 0.3756224596945538,
"learning_rate": 2.5564349278329056e-05,
"loss": 0.3001,
"step": 549
},
{
"epoch": 2.3475479744136463,
"grad_norm": 0.4109819828033757,
"learning_rate": 2.5507011082507834e-05,
"loss": 0.3305,
"step": 550
},
{
"epoch": 2.35181236673774,
"grad_norm": 0.4383761223461391,
"learning_rate": 2.544962386834306e-05,
"loss": 0.3083,
"step": 551
},
{
"epoch": 2.3560767590618337,
"grad_norm": 0.41464003342514316,
"learning_rate": 2.539218814664288e-05,
"loss": 0.349,
"step": 552
},
{
"epoch": 2.3603411513859274,
"grad_norm": 0.5062569973089363,
"learning_rate": 2.5334704428647227e-05,
"loss": 0.3302,
"step": 553
},
{
"epoch": 2.364605543710021,
"grad_norm": 0.36480421490785353,
"learning_rate": 2.527717322602325e-05,
"loss": 0.3174,
"step": 554
},
{
"epoch": 2.368869936034115,
"grad_norm": 0.41474113980895305,
"learning_rate": 2.521959505086075e-05,
"loss": 0.3671,
"step": 555
},
{
"epoch": 2.373134328358209,
"grad_norm": 0.36718360788116927,
"learning_rate": 2.516197041566764e-05,
"loss": 0.2863,
"step": 556
},
{
"epoch": 2.3773987206823026,
"grad_norm": 0.42920079467315114,
"learning_rate": 2.510429983336539e-05,
"loss": 0.354,
"step": 557
},
{
"epoch": 2.3816631130063968,
"grad_norm": 0.46613801110394354,
"learning_rate": 2.5046583817284437e-05,
"loss": 0.3276,
"step": 558
},
{
"epoch": 2.3859275053304905,
"grad_norm": 0.35311255938462993,
"learning_rate": 2.4988822881159627e-05,
"loss": 0.3059,
"step": 559
},
{
"epoch": 2.390191897654584,
"grad_norm": 0.39778364103851305,
"learning_rate": 2.4931017539125648e-05,
"loss": 0.3399,
"step": 560
},
{
"epoch": 2.394456289978678,
"grad_norm": 0.3969131330840867,
"learning_rate": 2.487316830571244e-05,
"loss": 0.3441,
"step": 561
},
{
"epoch": 2.398720682302772,
"grad_norm": 0.3998460096155394,
"learning_rate": 2.481527569584064e-05,
"loss": 0.347,
"step": 562
},
{
"epoch": 2.4029850746268657,
"grad_norm": 0.3724128051878021,
"learning_rate": 2.4757340224816964e-05,
"loss": 0.3212,
"step": 563
},
{
"epoch": 2.4072494669509594,
"grad_norm": 0.4014481595437383,
"learning_rate": 2.4699362408329646e-05,
"loss": 0.3335,
"step": 564
},
{
"epoch": 2.411513859275053,
"grad_norm": 0.42703172622134855,
"learning_rate": 2.4641342762443845e-05,
"loss": 0.3464,
"step": 565
},
{
"epoch": 2.4157782515991473,
"grad_norm": 0.3889377354808822,
"learning_rate": 2.458328180359704e-05,
"loss": 0.3391,
"step": 566
},
{
"epoch": 2.420042643923241,
"grad_norm": 0.3966463724749422,
"learning_rate": 2.4525180048594452e-05,
"loss": 0.3274,
"step": 567
},
{
"epoch": 2.4243070362473347,
"grad_norm": 0.37604211049525493,
"learning_rate": 2.4467038014604402e-05,
"loss": 0.3301,
"step": 568
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.3860914267444212,
"learning_rate": 2.4408856219153775e-05,
"loss": 0.3415,
"step": 569
},
{
"epoch": 2.4328358208955225,
"grad_norm": 0.4195346230813679,
"learning_rate": 2.435063518012335e-05,
"loss": 0.3278,
"step": 570
},
{
"epoch": 2.4371002132196162,
"grad_norm": 0.4641696550558176,
"learning_rate": 2.4292375415743235e-05,
"loss": 0.3766,
"step": 571
},
{
"epoch": 2.44136460554371,
"grad_norm": 0.4055853025049748,
"learning_rate": 2.423407744458822e-05,
"loss": 0.3359,
"step": 572
},
{
"epoch": 2.4456289978678036,
"grad_norm": 0.38176150606644776,
"learning_rate": 2.4175741785573177e-05,
"loss": 0.3068,
"step": 573
},
{
"epoch": 2.449893390191898,
"grad_norm": 0.4069042605717178,
"learning_rate": 2.4117368957948452e-05,
"loss": 0.3253,
"step": 574
},
{
"epoch": 2.4541577825159915,
"grad_norm": 0.3860477934658931,
"learning_rate": 2.4058959481295226e-05,
"loss": 0.3612,
"step": 575
},
{
"epoch": 2.458422174840085,
"grad_norm": 0.36453620340817394,
"learning_rate": 2.4000513875520892e-05,
"loss": 0.2868,
"step": 576
},
{
"epoch": 2.4626865671641793,
"grad_norm": 0.38667475042155025,
"learning_rate": 2.3942032660854434e-05,
"loss": 0.3264,
"step": 577
},
{
"epoch": 2.466950959488273,
"grad_norm": 0.3810779692705642,
"learning_rate": 2.3883516357841797e-05,
"loss": 0.3502,
"step": 578
},
{
"epoch": 2.4712153518123667,
"grad_norm": 0.4029065335258986,
"learning_rate": 2.3824965487341247e-05,
"loss": 0.3164,
"step": 579
},
{
"epoch": 2.4754797441364604,
"grad_norm": 0.3641659784797967,
"learning_rate": 2.376638057051874e-05,
"loss": 0.3689,
"step": 580
},
{
"epoch": 2.479744136460554,
"grad_norm": 0.386320924313445,
"learning_rate": 2.370776212884327e-05,
"loss": 0.3223,
"step": 581
},
{
"epoch": 2.4840085287846483,
"grad_norm": 0.3683303725219809,
"learning_rate": 2.3649110684082258e-05,
"loss": 0.3419,
"step": 582
},
{
"epoch": 2.488272921108742,
"grad_norm": 0.3400504068210789,
"learning_rate": 2.3590426758296873e-05,
"loss": 0.3319,
"step": 583
},
{
"epoch": 2.4925373134328357,
"grad_norm": 0.36853362002125634,
"learning_rate": 2.35317108738374e-05,
"loss": 0.3407,
"step": 584
},
{
"epoch": 2.49680170575693,
"grad_norm": 0.44453763314473116,
"learning_rate": 2.3472963553338614e-05,
"loss": 0.3664,
"step": 585
},
{
"epoch": 2.5010660980810235,
"grad_norm": 0.3896349047477031,
"learning_rate": 2.3414185319715062e-05,
"loss": 0.3057,
"step": 586
},
{
"epoch": 2.5053304904051172,
"grad_norm": 0.4037747109455124,
"learning_rate": 2.335537669615649e-05,
"loss": 0.32,
"step": 587
},
{
"epoch": 2.509594882729211,
"grad_norm": 0.42757034531523247,
"learning_rate": 2.3296538206123134e-05,
"loss": 0.3533,
"step": 588
},
{
"epoch": 2.5138592750533046,
"grad_norm": 0.3590031237695879,
"learning_rate": 2.3237670373341065e-05,
"loss": 0.3362,
"step": 589
},
{
"epoch": 2.518123667377399,
"grad_norm": 0.3564034419723339,
"learning_rate": 2.3178773721797546e-05,
"loss": 0.3356,
"step": 590
},
{
"epoch": 2.5223880597014925,
"grad_norm": 0.3772097468768756,
"learning_rate": 2.311984877573636e-05,
"loss": 0.3423,
"step": 591
},
{
"epoch": 2.526652452025586,
"grad_norm": 0.3572311012517997,
"learning_rate": 2.3060896059653133e-05,
"loss": 0.3298,
"step": 592
},
{
"epoch": 2.5309168443496803,
"grad_norm": 0.48695055246766855,
"learning_rate": 2.3001916098290684e-05,
"loss": 0.3562,
"step": 593
},
{
"epoch": 2.535181236673774,
"grad_norm": 0.36634244613995803,
"learning_rate": 2.2942909416634326e-05,
"loss": 0.3412,
"step": 594
},
{
"epoch": 2.5394456289978677,
"grad_norm": 0.44425887232294786,
"learning_rate": 2.2883876539907238e-05,
"loss": 0.3492,
"step": 595
},
{
"epoch": 2.543710021321962,
"grad_norm": 0.3804698065592149,
"learning_rate": 2.2824817993565743e-05,
"loss": 0.3192,
"step": 596
},
{
"epoch": 2.5479744136460556,
"grad_norm": 0.4056116969076533,
"learning_rate": 2.2765734303294666e-05,
"loss": 0.3225,
"step": 597
},
{
"epoch": 2.5522388059701493,
"grad_norm": 0.35778597376945903,
"learning_rate": 2.2706625995002626e-05,
"loss": 0.318,
"step": 598
},
{
"epoch": 2.556503198294243,
"grad_norm": 0.38168555414145255,
"learning_rate": 2.2647493594817375e-05,
"loss": 0.3601,
"step": 599
},
{
"epoch": 2.5607675906183367,
"grad_norm": 0.37162055381097153,
"learning_rate": 2.2588337629081107e-05,
"loss": 0.3131,
"step": 600
},
{
"epoch": 2.565031982942431,
"grad_norm": 0.39569822220724193,
"learning_rate": 2.2529158624345793e-05,
"loss": 0.3355,
"step": 601
},
{
"epoch": 2.5692963752665245,
"grad_norm": 0.3683693883610303,
"learning_rate": 2.246995710736844e-05,
"loss": 0.3281,
"step": 602
},
{
"epoch": 2.5735607675906182,
"grad_norm": 0.3638653113004468,
"learning_rate": 2.2410733605106462e-05,
"loss": 0.3473,
"step": 603
},
{
"epoch": 2.5778251599147124,
"grad_norm": 0.3432942852936682,
"learning_rate": 2.2351488644712977e-05,
"loss": 0.3174,
"step": 604
},
{
"epoch": 2.582089552238806,
"grad_norm": 0.36441663266011637,
"learning_rate": 2.2292222753532067e-05,
"loss": 0.3404,
"step": 605
},
{
"epoch": 2.5863539445629,
"grad_norm": 0.34359099977889773,
"learning_rate": 2.2232936459094158e-05,
"loss": 0.2855,
"step": 606
},
{
"epoch": 2.5906183368869935,
"grad_norm": 0.41180627075263315,
"learning_rate": 2.2173630289111267e-05,
"loss": 0.3292,
"step": 607
},
{
"epoch": 2.594882729211087,
"grad_norm": 0.40856953637927446,
"learning_rate": 2.211430477147232e-05,
"loss": 0.3148,
"step": 608
},
{
"epoch": 2.5991471215351813,
"grad_norm": 0.4143158196510196,
"learning_rate": 2.205496043423849e-05,
"loss": 0.3582,
"step": 609
},
{
"epoch": 2.603411513859275,
"grad_norm": 0.3688325646692266,
"learning_rate": 2.1995597805638415e-05,
"loss": 0.3033,
"step": 610
},
{
"epoch": 2.6076759061833688,
"grad_norm": 0.44966974266720894,
"learning_rate": 2.1936217414063584e-05,
"loss": 0.336,
"step": 611
},
{
"epoch": 2.611940298507463,
"grad_norm": 0.35613612940024736,
"learning_rate": 2.1876819788063586e-05,
"loss": 0.3155,
"step": 612
},
{
"epoch": 2.6162046908315566,
"grad_norm": 0.4111271913056953,
"learning_rate": 2.1817405456341412e-05,
"loss": 0.3383,
"step": 613
},
{
"epoch": 2.6204690831556503,
"grad_norm": 0.3659202167790694,
"learning_rate": 2.1757974947748762e-05,
"loss": 0.3215,
"step": 614
},
{
"epoch": 2.624733475479744,
"grad_norm": 0.3856052575394882,
"learning_rate": 2.16985287912813e-05,
"loss": 0.31,
"step": 615
},
{
"epoch": 2.6289978678038377,
"grad_norm": 0.41136358511221127,
"learning_rate": 2.1639067516074004e-05,
"loss": 0.3251,
"step": 616
},
{
"epoch": 2.633262260127932,
"grad_norm": 0.3477988590548033,
"learning_rate": 2.1579591651396412e-05,
"loss": 0.2935,
"step": 617
},
{
"epoch": 2.6375266524520256,
"grad_norm": 0.4337010275602838,
"learning_rate": 2.1520101726647922e-05,
"loss": 0.361,
"step": 618
},
{
"epoch": 2.6417910447761193,
"grad_norm": 0.4624391197853646,
"learning_rate": 2.146059827135309e-05,
"loss": 0.333,
"step": 619
},
{
"epoch": 2.6460554371002134,
"grad_norm": 0.3727233265161806,
"learning_rate": 2.1401081815156894e-05,
"loss": 0.296,
"step": 620
},
{
"epoch": 2.650319829424307,
"grad_norm": 0.44643626666607605,
"learning_rate": 2.1341552887820048e-05,
"loss": 0.3495,
"step": 621
},
{
"epoch": 2.654584221748401,
"grad_norm": 0.3970268512437942,
"learning_rate": 2.1282012019214275e-05,
"loss": 0.3386,
"step": 622
},
{
"epoch": 2.6588486140724945,
"grad_norm": 0.37620787661454785,
"learning_rate": 2.122245973931757e-05,
"loss": 0.3234,
"step": 623
},
{
"epoch": 2.663113006396588,
"grad_norm": 0.4494049986409132,
"learning_rate": 2.1162896578209517e-05,
"loss": 0.3425,
"step": 624
},
{
"epoch": 2.6673773987206824,
"grad_norm": 0.3300487285841384,
"learning_rate": 2.110332306606655e-05,
"loss": 0.3558,
"step": 625
},
{
"epoch": 2.671641791044776,
"grad_norm": 0.4730585433014445,
"learning_rate": 2.104373973315724e-05,
"loss": 0.3596,
"step": 626
},
{
"epoch": 2.6759061833688698,
"grad_norm": 0.31988464666714095,
"learning_rate": 2.0984147109837564e-05,
"loss": 0.3197,
"step": 627
},
{
"epoch": 2.680170575692964,
"grad_norm": 0.4024194409780812,
"learning_rate": 2.092454572654621e-05,
"loss": 0.3419,
"step": 628
},
{
"epoch": 2.6844349680170576,
"grad_norm": 0.4185283654543116,
"learning_rate": 2.0864936113799818e-05,
"loss": 0.3554,
"step": 629
},
{
"epoch": 2.6886993603411513,
"grad_norm": 0.4659433660171756,
"learning_rate": 2.0805318802188307e-05,
"loss": 0.3266,
"step": 630
},
{
"epoch": 2.6929637526652455,
"grad_norm": 0.3663919667117319,
"learning_rate": 2.0745694322370095e-05,
"loss": 0.3287,
"step": 631
},
{
"epoch": 2.697228144989339,
"grad_norm": 0.451511426664104,
"learning_rate": 2.0686063205067426e-05,
"loss": 0.3538,
"step": 632
},
{
"epoch": 2.701492537313433,
"grad_norm": 0.3626527728578006,
"learning_rate": 2.0626425981061608e-05,
"loss": 0.3372,
"step": 633
},
{
"epoch": 2.7057569296375266,
"grad_norm": 0.4221364252043816,
"learning_rate": 2.056678318118832e-05,
"loss": 0.3609,
"step": 634
},
{
"epoch": 2.7100213219616203,
"grad_norm": 0.3636772833343896,
"learning_rate": 2.050713533633287e-05,
"loss": 0.3004,
"step": 635
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.3508912160194277,
"learning_rate": 2.0447482977425465e-05,
"loss": 0.3473,
"step": 636
},
{
"epoch": 2.718550106609808,
"grad_norm": 0.40925231854920796,
"learning_rate": 2.038782663543649e-05,
"loss": 0.3616,
"step": 637
},
{
"epoch": 2.722814498933902,
"grad_norm": 0.3613534312982199,
"learning_rate": 2.0328166841371796e-05,
"loss": 0.3297,
"step": 638
},
{
"epoch": 2.727078891257996,
"grad_norm": 0.38532149432963253,
"learning_rate": 2.0268504126267952e-05,
"loss": 0.3448,
"step": 639
},
{
"epoch": 2.7313432835820897,
"grad_norm": 0.3267536319660621,
"learning_rate": 2.020883902118753e-05,
"loss": 0.3176,
"step": 640
},
{
"epoch": 2.7356076759061834,
"grad_norm": 0.34048997891320393,
"learning_rate": 2.014917205721437e-05,
"loss": 0.3182,
"step": 641
},
{
"epoch": 2.739872068230277,
"grad_norm": 0.3574451853759449,
"learning_rate": 2.008950376544887e-05,
"loss": 0.3194,
"step": 642
},
{
"epoch": 2.7441364605543708,
"grad_norm": 0.3815211412430124,
"learning_rate": 2.0029834677003235e-05,
"loss": 0.3767,
"step": 643
},
{
"epoch": 2.748400852878465,
"grad_norm": 0.34767904398176336,
"learning_rate": 1.9970165322996768e-05,
"loss": 0.3256,
"step": 644
},
{
"epoch": 2.7526652452025586,
"grad_norm": 0.38620602061788006,
"learning_rate": 1.9910496234551132e-05,
"loss": 0.3109,
"step": 645
},
{
"epoch": 2.7569296375266523,
"grad_norm": 0.3924985087086562,
"learning_rate": 1.985082794278563e-05,
"loss": 0.3488,
"step": 646
},
{
"epoch": 2.7611940298507465,
"grad_norm": 0.3511647263545227,
"learning_rate": 1.979116097881248e-05,
"loss": 0.3176,
"step": 647
},
{
"epoch": 2.76545842217484,
"grad_norm": 0.3804886933596516,
"learning_rate": 1.9731495873732055e-05,
"loss": 0.3346,
"step": 648
},
{
"epoch": 2.769722814498934,
"grad_norm": 0.3711827433144586,
"learning_rate": 1.967183315862821e-05,
"loss": 0.3245,
"step": 649
},
{
"epoch": 2.7739872068230276,
"grad_norm": 0.4040189766473357,
"learning_rate": 1.9612173364563517e-05,
"loss": 0.3413,
"step": 650
},
{
"epoch": 2.7782515991471213,
"grad_norm": 0.35670183847640413,
"learning_rate": 1.9552517022574542e-05,
"loss": 0.3335,
"step": 651
},
{
"epoch": 2.7825159914712154,
"grad_norm": 0.3375759136279141,
"learning_rate": 1.9492864663667135e-05,
"loss": 0.2882,
"step": 652
},
{
"epoch": 2.786780383795309,
"grad_norm": 0.3975695376663641,
"learning_rate": 1.9433216818811686e-05,
"loss": 0.3295,
"step": 653
},
{
"epoch": 2.791044776119403,
"grad_norm": 0.3290654794586691,
"learning_rate": 1.93735740189384e-05,
"loss": 0.3169,
"step": 654
},
{
"epoch": 2.795309168443497,
"grad_norm": 0.37186538014031906,
"learning_rate": 1.931393679493258e-05,
"loss": 0.3502,
"step": 655
},
{
"epoch": 2.7995735607675907,
"grad_norm": 0.35915428065348987,
"learning_rate": 1.925430567762991e-05,
"loss": 0.3391,
"step": 656
},
{
"epoch": 2.8038379530916844,
"grad_norm": 0.37547914494206946,
"learning_rate": 1.9194681197811703e-05,
"loss": 0.3224,
"step": 657
},
{
"epoch": 2.8081023454157785,
"grad_norm": 0.36867525713174365,
"learning_rate": 1.9135063886200186e-05,
"loss": 0.339,
"step": 658
},
{
"epoch": 2.8123667377398722,
"grad_norm": 0.35008429854386747,
"learning_rate": 1.9075454273453797e-05,
"loss": 0.3089,
"step": 659
},
{
"epoch": 2.816631130063966,
"grad_norm": 0.39355064952010693,
"learning_rate": 1.901585289016244e-05,
"loss": 0.3641,
"step": 660
},
{
"epoch": 2.8208955223880596,
"grad_norm": 0.3537373717689814,
"learning_rate": 1.8956260266842762e-05,
"loss": 0.3354,
"step": 661
},
{
"epoch": 2.8251599147121533,
"grad_norm": 0.41391996442389295,
"learning_rate": 1.8896676933933455e-05,
"loss": 0.3491,
"step": 662
},
{
"epoch": 2.8294243070362475,
"grad_norm": 0.3569482915635849,
"learning_rate": 1.8837103421790486e-05,
"loss": 0.3226,
"step": 663
},
{
"epoch": 2.833688699360341,
"grad_norm": 0.37516722859555784,
"learning_rate": 1.8777540260682433e-05,
"loss": 0.302,
"step": 664
},
{
"epoch": 2.837953091684435,
"grad_norm": 0.4248830795261121,
"learning_rate": 1.8717987980785728e-05,
"loss": 0.3519,
"step": 665
},
{
"epoch": 2.842217484008529,
"grad_norm": 0.37783359614073125,
"learning_rate": 1.8658447112179952e-05,
"loss": 0.321,
"step": 666
},
{
"epoch": 2.8464818763326227,
"grad_norm": 0.41802490330254377,
"learning_rate": 1.8598918184843116e-05,
"loss": 0.3183,
"step": 667
},
{
"epoch": 2.8507462686567164,
"grad_norm": 0.3718331088787641,
"learning_rate": 1.8539401728646923e-05,
"loss": 0.3411,
"step": 668
},
{
"epoch": 2.85501066098081,
"grad_norm": 0.44945631637607075,
"learning_rate": 1.8479898273352084e-05,
"loss": 0.3516,
"step": 669
},
{
"epoch": 2.859275053304904,
"grad_norm": 0.3579317342747698,
"learning_rate": 1.8420408348603595e-05,
"loss": 0.3389,
"step": 670
},
{
"epoch": 2.863539445628998,
"grad_norm": 0.36099208067311495,
"learning_rate": 1.8360932483926e-05,
"loss": 0.3241,
"step": 671
},
{
"epoch": 2.8678038379530917,
"grad_norm": 0.47001526249967157,
"learning_rate": 1.83014712087187e-05,
"loss": 0.3337,
"step": 672
},
{
"epoch": 2.8720682302771854,
"grad_norm": 0.3714088126438469,
"learning_rate": 1.824202505225125e-05,
"loss": 0.3279,
"step": 673
},
{
"epoch": 2.8763326226012795,
"grad_norm": 0.4175307068350295,
"learning_rate": 1.818259454365859e-05,
"loss": 0.3735,
"step": 674
},
{
"epoch": 2.8805970149253732,
"grad_norm": 0.3550834520314073,
"learning_rate": 1.8123180211936417e-05,
"loss": 0.3165,
"step": 675
},
{
"epoch": 2.884861407249467,
"grad_norm": 0.4383311757544316,
"learning_rate": 1.806378258593642e-05,
"loss": 0.3508,
"step": 676
},
{
"epoch": 2.8891257995735606,
"grad_norm": 0.39587309166564166,
"learning_rate": 1.800440219436159e-05,
"loss": 0.3356,
"step": 677
},
{
"epoch": 2.8933901918976543,
"grad_norm": 0.3556833358344844,
"learning_rate": 1.794503956576152e-05,
"loss": 0.3129,
"step": 678
},
{
"epoch": 2.8976545842217485,
"grad_norm": 0.49312145714140465,
"learning_rate": 1.7885695228527683e-05,
"loss": 0.3646,
"step": 679
},
{
"epoch": 2.901918976545842,
"grad_norm": 0.4062615415192085,
"learning_rate": 1.782636971088874e-05,
"loss": 0.3253,
"step": 680
},
{
"epoch": 2.906183368869936,
"grad_norm": 0.3933273488643739,
"learning_rate": 1.776706354090585e-05,
"loss": 0.3456,
"step": 681
},
{
"epoch": 2.91044776119403,
"grad_norm": 0.43585170578526733,
"learning_rate": 1.7707777246467933e-05,
"loss": 0.358,
"step": 682
},
{
"epoch": 2.9147121535181237,
"grad_norm": 0.40241469625160387,
"learning_rate": 1.7648511355287037e-05,
"loss": 0.3261,
"step": 683
},
{
"epoch": 2.9189765458422174,
"grad_norm": 0.4205344502308214,
"learning_rate": 1.758926639489354e-05,
"loss": 0.3076,
"step": 684
},
{
"epoch": 2.923240938166311,
"grad_norm": 0.38789875148418834,
"learning_rate": 1.7530042892631568e-05,
"loss": 0.3185,
"step": 685
},
{
"epoch": 2.927505330490405,
"grad_norm": 0.3516989414998489,
"learning_rate": 1.7470841375654214e-05,
"loss": 0.3049,
"step": 686
},
{
"epoch": 2.931769722814499,
"grad_norm": 0.3839851773789998,
"learning_rate": 1.7411662370918893e-05,
"loss": 0.3595,
"step": 687
},
{
"epoch": 2.9360341151385927,
"grad_norm": 0.42202276848131426,
"learning_rate": 1.7352506405182635e-05,
"loss": 0.3483,
"step": 688
},
{
"epoch": 2.9402985074626864,
"grad_norm": 0.3429765228499871,
"learning_rate": 1.7293374004997384e-05,
"loss": 0.3195,
"step": 689
},
{
"epoch": 2.9445628997867805,
"grad_norm": 0.35536478053153486,
"learning_rate": 1.7234265696705344e-05,
"loss": 0.3421,
"step": 690
},
{
"epoch": 2.9488272921108742,
"grad_norm": 0.35560187786546854,
"learning_rate": 1.717518200643426e-05,
"loss": 0.3342,
"step": 691
},
{
"epoch": 2.953091684434968,
"grad_norm": 0.3995220644927285,
"learning_rate": 1.711612346009277e-05,
"loss": 0.3359,
"step": 692
},
{
"epoch": 2.957356076759062,
"grad_norm": 0.3695517373221299,
"learning_rate": 1.7057090583365678e-05,
"loss": 0.3403,
"step": 693
},
{
"epoch": 2.961620469083156,
"grad_norm": 0.300686396282184,
"learning_rate": 1.699808390170933e-05,
"loss": 0.2988,
"step": 694
},
{
"epoch": 2.9658848614072495,
"grad_norm": 0.4104459486229632,
"learning_rate": 1.6939103940346874e-05,
"loss": 0.3721,
"step": 695
},
{
"epoch": 2.970149253731343,
"grad_norm": 0.30710749710833807,
"learning_rate": 1.6880151224263646e-05,
"loss": 0.3053,
"step": 696
},
{
"epoch": 2.974413646055437,
"grad_norm": 0.4085034233515771,
"learning_rate": 1.6821226278202458e-05,
"loss": 0.3504,
"step": 697
},
{
"epoch": 2.978678038379531,
"grad_norm": 0.3714715567431405,
"learning_rate": 1.676232962665894e-05,
"loss": 0.333,
"step": 698
},
{
"epoch": 2.9829424307036247,
"grad_norm": 0.3676846794603304,
"learning_rate": 1.6703461793876876e-05,
"loss": 0.3326,
"step": 699
},
{
"epoch": 2.9872068230277184,
"grad_norm": 0.4106369860379901,
"learning_rate": 1.6644623303843518e-05,
"loss": 0.3095,
"step": 700
},
{
"epoch": 2.9914712153518126,
"grad_norm": 0.37751964437773083,
"learning_rate": 1.6585814680284945e-05,
"loss": 0.3751,
"step": 701
},
{
"epoch": 2.9957356076759063,
"grad_norm": 0.3902738884075048,
"learning_rate": 1.6527036446661396e-05,
"loss": 0.3261,
"step": 702
},
{
"epoch": 3.0010660980810235,
"grad_norm": 0.7206224820790956,
"learning_rate": 1.64682891261626e-05,
"loss": 0.5125,
"step": 703
},
{
"epoch": 3.0053304904051172,
"grad_norm": 0.5281661795561857,
"learning_rate": 1.6409573241703138e-05,
"loss": 0.2907,
"step": 704
},
{
"epoch": 3.009594882729211,
"grad_norm": 0.42118092897335085,
"learning_rate": 1.635088931591775e-05,
"loss": 0.2279,
"step": 705
},
{
"epoch": 3.013859275053305,
"grad_norm": 0.6733763572008821,
"learning_rate": 1.6292237871156734e-05,
"loss": 0.2392,
"step": 706
},
{
"epoch": 3.018123667377399,
"grad_norm": 0.4467857416127825,
"learning_rate": 1.6233619429481268e-05,
"loss": 0.2593,
"step": 707
},
{
"epoch": 3.0223880597014925,
"grad_norm": 0.4960925783392073,
"learning_rate": 1.6175034512658753e-05,
"loss": 0.2384,
"step": 708
},
{
"epoch": 3.026652452025586,
"grad_norm": 0.5148846864817478,
"learning_rate": 1.6116483642158213e-05,
"loss": 0.2707,
"step": 709
},
{
"epoch": 3.0309168443496803,
"grad_norm": 0.3489966405865502,
"learning_rate": 1.6057967339145572e-05,
"loss": 0.2101,
"step": 710
},
{
"epoch": 3.035181236673774,
"grad_norm": 0.48600165080284125,
"learning_rate": 1.5999486124479115e-05,
"loss": 0.2589,
"step": 711
},
{
"epoch": 3.0394456289978677,
"grad_norm": 0.4671615074532655,
"learning_rate": 1.594104051870478e-05,
"loss": 0.2443,
"step": 712
},
{
"epoch": 3.0437100213219614,
"grad_norm": 0.4565693482005637,
"learning_rate": 1.588263104205155e-05,
"loss": 0.2356,
"step": 713
},
{
"epoch": 3.0479744136460556,
"grad_norm": 0.4030961726983731,
"learning_rate": 1.5824258214426833e-05,
"loss": 0.2489,
"step": 714
},
{
"epoch": 3.0522388059701493,
"grad_norm": 0.4183617430166792,
"learning_rate": 1.5765922555411793e-05,
"loss": 0.2525,
"step": 715
},
{
"epoch": 3.056503198294243,
"grad_norm": 0.4104838834580541,
"learning_rate": 1.570762458425677e-05,
"loss": 0.2444,
"step": 716
},
{
"epoch": 3.0607675906183367,
"grad_norm": 0.39244414919578313,
"learning_rate": 1.5649364819876655e-05,
"loss": 0.2483,
"step": 717
},
{
"epoch": 3.065031982942431,
"grad_norm": 0.36480709630972974,
"learning_rate": 1.559114378084623e-05,
"loss": 0.2216,
"step": 718
},
{
"epoch": 3.0692963752665245,
"grad_norm": 0.3946748564748482,
"learning_rate": 1.5532961985395605e-05,
"loss": 0.2644,
"step": 719
},
{
"epoch": 3.0735607675906182,
"grad_norm": 0.38069525301339247,
"learning_rate": 1.547481995140556e-05,
"loss": 0.241,
"step": 720
},
{
"epoch": 3.077825159914712,
"grad_norm": 0.3544801582734647,
"learning_rate": 1.5416718196402966e-05,
"loss": 0.224,
"step": 721
},
{
"epoch": 3.082089552238806,
"grad_norm": 0.39637110438337914,
"learning_rate": 1.535865723755616e-05,
"loss": 0.2661,
"step": 722
},
{
"epoch": 3.0863539445629,
"grad_norm": 0.355117830176425,
"learning_rate": 1.5300637591670357e-05,
"loss": 0.2448,
"step": 723
},
{
"epoch": 3.0906183368869935,
"grad_norm": 0.3712024712331717,
"learning_rate": 1.5242659775183039e-05,
"loss": 0.231,
"step": 724
},
{
"epoch": 3.094882729211087,
"grad_norm": 0.34556003676946495,
"learning_rate": 1.5184724304159368e-05,
"loss": 0.2438,
"step": 725
},
{
"epoch": 3.0991471215351813,
"grad_norm": 0.3833769510169898,
"learning_rate": 1.5126831694287564e-05,
"loss": 0.2547,
"step": 726
},
{
"epoch": 3.103411513859275,
"grad_norm": 0.32399743754417243,
"learning_rate": 1.5068982460874357e-05,
"loss": 0.2392,
"step": 727
},
{
"epoch": 3.1076759061833688,
"grad_norm": 0.32323430623785687,
"learning_rate": 1.5011177118840376e-05,
"loss": 0.2504,
"step": 728
},
{
"epoch": 3.111940298507463,
"grad_norm": 0.32697679971993443,
"learning_rate": 1.4953416182715566e-05,
"loss": 0.2636,
"step": 729
},
{
"epoch": 3.1162046908315566,
"grad_norm": 0.32920439795771206,
"learning_rate": 1.4895700166634615e-05,
"loss": 0.2428,
"step": 730
},
{
"epoch": 3.1204690831556503,
"grad_norm": 0.3389965095480859,
"learning_rate": 1.4838029584332364e-05,
"loss": 0.2411,
"step": 731
},
{
"epoch": 3.124733475479744,
"grad_norm": 0.3407193460469488,
"learning_rate": 1.478040494913926e-05,
"loss": 0.2659,
"step": 732
},
{
"epoch": 3.128997867803838,
"grad_norm": 0.32204674701899744,
"learning_rate": 1.4722826773976757e-05,
"loss": 0.2289,
"step": 733
},
{
"epoch": 3.133262260127932,
"grad_norm": 0.3246267785751812,
"learning_rate": 1.4665295571352776e-05,
"loss": 0.2608,
"step": 734
},
{
"epoch": 3.1375266524520256,
"grad_norm": 0.33825862655046196,
"learning_rate": 1.460781185335713e-05,
"loss": 0.2376,
"step": 735
},
{
"epoch": 3.1417910447761193,
"grad_norm": 0.2967323124289374,
"learning_rate": 1.4550376131656952e-05,
"loss": 0.2549,
"step": 736
},
{
"epoch": 3.1460554371002134,
"grad_norm": 0.3098012403561765,
"learning_rate": 1.449298891749217e-05,
"loss": 0.2364,
"step": 737
},
{
"epoch": 3.150319829424307,
"grad_norm": 0.2997550978061584,
"learning_rate": 1.443565072167095e-05,
"loss": 0.2293,
"step": 738
},
{
"epoch": 3.154584221748401,
"grad_norm": 0.32943504430215304,
"learning_rate": 1.4378362054565123e-05,
"loss": 0.2881,
"step": 739
},
{
"epoch": 3.1588486140724945,
"grad_norm": 0.3109316470570683,
"learning_rate": 1.4321123426105686e-05,
"loss": 0.221,
"step": 740
},
{
"epoch": 3.1631130063965887,
"grad_norm": 0.34529106989101405,
"learning_rate": 1.4263935345778202e-05,
"loss": 0.2709,
"step": 741
},
{
"epoch": 3.1673773987206824,
"grad_norm": 0.2975749768452129,
"learning_rate": 1.420679832261833e-05,
"loss": 0.2276,
"step": 742
},
{
"epoch": 3.171641791044776,
"grad_norm": 0.29508491762785355,
"learning_rate": 1.4149712865207256e-05,
"loss": 0.2344,
"step": 743
},
{
"epoch": 3.1759061833688698,
"grad_norm": 0.2973838926689209,
"learning_rate": 1.409267948166718e-05,
"loss": 0.242,
"step": 744
},
{
"epoch": 3.180170575692964,
"grad_norm": 0.3144417744580649,
"learning_rate": 1.4035698679656777e-05,
"loss": 0.2497,
"step": 745
},
{
"epoch": 3.1844349680170576,
"grad_norm": 0.30367019765704484,
"learning_rate": 1.397877096636671e-05,
"loss": 0.2429,
"step": 746
},
{
"epoch": 3.1886993603411513,
"grad_norm": 0.2977699986334773,
"learning_rate": 1.3921896848515064e-05,
"loss": 0.2495,
"step": 747
},
{
"epoch": 3.192963752665245,
"grad_norm": 0.30274038191648445,
"learning_rate": 1.3865076832342897e-05,
"loss": 0.23,
"step": 748
},
{
"epoch": 3.197228144989339,
"grad_norm": 0.2916186731441911,
"learning_rate": 1.3808311423609686e-05,
"loss": 0.233,
"step": 749
},
{
"epoch": 3.201492537313433,
"grad_norm": 0.32401373636645003,
"learning_rate": 1.3751601127588849e-05,
"loss": 0.2529,
"step": 750
},
{
"epoch": 3.2057569296375266,
"grad_norm": 0.3465730918845462,
"learning_rate": 1.3694946449063249e-05,
"loss": 0.2621,
"step": 751
},
{
"epoch": 3.2100213219616203,
"grad_norm": 0.32661862365231215,
"learning_rate": 1.3638347892320671e-05,
"loss": 0.2498,
"step": 752
},
{
"epoch": 3.2142857142857144,
"grad_norm": 0.28989193446587946,
"learning_rate": 1.3581805961149371e-05,
"loss": 0.2154,
"step": 753
},
{
"epoch": 3.218550106609808,
"grad_norm": 0.32094760076315715,
"learning_rate": 1.3525321158833582e-05,
"loss": 0.255,
"step": 754
},
{
"epoch": 3.222814498933902,
"grad_norm": 0.3026223441819097,
"learning_rate": 1.3468893988149014e-05,
"loss": 0.2311,
"step": 755
},
{
"epoch": 3.227078891257996,
"grad_norm": 0.34131523556890064,
"learning_rate": 1.341252495135841e-05,
"loss": 0.2379,
"step": 756
},
{
"epoch": 3.2313432835820897,
"grad_norm": 0.3128286044976363,
"learning_rate": 1.3356214550207038e-05,
"loss": 0.2549,
"step": 757
},
{
"epoch": 3.2356076759061834,
"grad_norm": 0.29134924946443214,
"learning_rate": 1.3299963285918265e-05,
"loss": 0.2423,
"step": 758
},
{
"epoch": 3.239872068230277,
"grad_norm": 0.32994384460086157,
"learning_rate": 1.324377165918906e-05,
"loss": 0.2668,
"step": 759
},
{
"epoch": 3.2441364605543708,
"grad_norm": 0.30399396622625136,
"learning_rate": 1.3187640170185563e-05,
"loss": 0.2388,
"step": 760
},
{
"epoch": 3.248400852878465,
"grad_norm": 0.298330256235117,
"learning_rate": 1.3131569318538629e-05,
"loss": 0.216,
"step": 761
},
{
"epoch": 3.2526652452025586,
"grad_norm": 0.3356191635268766,
"learning_rate": 1.3075559603339354e-05,
"loss": 0.2515,
"step": 762
},
{
"epoch": 3.2569296375266523,
"grad_norm": 0.3278526977727378,
"learning_rate": 1.3019611523134672e-05,
"loss": 0.2514,
"step": 763
},
{
"epoch": 3.2611940298507465,
"grad_norm": 0.3076746070268428,
"learning_rate": 1.2963725575922896e-05,
"loss": 0.2556,
"step": 764
},
{
"epoch": 3.26545842217484,
"grad_norm": 0.302825164819859,
"learning_rate": 1.2907902259149287e-05,
"loss": 0.2343,
"step": 765
},
{
"epoch": 3.269722814498934,
"grad_norm": 0.3013156949351398,
"learning_rate": 1.2852142069701623e-05,
"loss": 0.2541,
"step": 766
},
{
"epoch": 3.2739872068230276,
"grad_norm": 0.2976355674558156,
"learning_rate": 1.2796445503905797e-05,
"loss": 0.2309,
"step": 767
},
{
"epoch": 3.2782515991471217,
"grad_norm": 0.29706305491961443,
"learning_rate": 1.274081305752135e-05,
"loss": 0.2295,
"step": 768
},
{
"epoch": 3.2825159914712154,
"grad_norm": 0.2881769876779232,
"learning_rate": 1.268524522573712e-05,
"loss": 0.2568,
"step": 769
},
{
"epoch": 3.286780383795309,
"grad_norm": 0.30859507617667564,
"learning_rate": 1.2629742503166797e-05,
"loss": 0.277,
"step": 770
},
{
"epoch": 3.291044776119403,
"grad_norm": 0.2700505406761647,
"learning_rate": 1.2574305383844528e-05,
"loss": 0.22,
"step": 771
},
{
"epoch": 3.295309168443497,
"grad_norm": 0.2700835765523195,
"learning_rate": 1.2518934361220523e-05,
"loss": 0.2129,
"step": 772
},
{
"epoch": 3.2995735607675907,
"grad_norm": 0.33992965453093965,
"learning_rate": 1.2463629928156653e-05,
"loss": 0.2461,
"step": 773
},
{
"epoch": 3.3038379530916844,
"grad_norm": 0.295791608172303,
"learning_rate": 1.2408392576922075e-05,
"loss": 0.2656,
"step": 774
},
{
"epoch": 3.308102345415778,
"grad_norm": 0.2911758696308667,
"learning_rate": 1.2353222799188844e-05,
"loss": 0.2465,
"step": 775
},
{
"epoch": 3.3123667377398722,
"grad_norm": 0.27346284641679014,
"learning_rate": 1.2298121086027543e-05,
"loss": 0.2263,
"step": 776
},
{
"epoch": 3.316631130063966,
"grad_norm": 0.32999783460577875,
"learning_rate": 1.2243087927902905e-05,
"loss": 0.2593,
"step": 777
},
{
"epoch": 3.3208955223880596,
"grad_norm": 0.2830646454254038,
"learning_rate": 1.2188123814669437e-05,
"loss": 0.2335,
"step": 778
},
{
"epoch": 3.3251599147121533,
"grad_norm": 0.32255253651431215,
"learning_rate": 1.2133229235567085e-05,
"loss": 0.2603,
"step": 779
},
{
"epoch": 3.3294243070362475,
"grad_norm": 0.32944650851857604,
"learning_rate": 1.2078404679216864e-05,
"loss": 0.2538,
"step": 780
},
{
"epoch": 3.333688699360341,
"grad_norm": 0.30124840761449095,
"learning_rate": 1.2023650633616511e-05,
"loss": 0.2263,
"step": 781
},
{
"epoch": 3.337953091684435,
"grad_norm": 0.32447240154800916,
"learning_rate": 1.1968967586136144e-05,
"loss": 0.2648,
"step": 782
},
{
"epoch": 3.342217484008529,
"grad_norm": 0.2975315219529145,
"learning_rate": 1.1914356023513904e-05,
"loss": 0.2279,
"step": 783
},
{
"epoch": 3.3464818763326227,
"grad_norm": 0.3031477389481757,
"learning_rate": 1.185981643185166e-05,
"loss": 0.2596,
"step": 784
},
{
"epoch": 3.3507462686567164,
"grad_norm": 0.30806045278790106,
"learning_rate": 1.1805349296610658e-05,
"loss": 0.2474,
"step": 785
},
{
"epoch": 3.35501066098081,
"grad_norm": 0.28929666459434256,
"learning_rate": 1.1750955102607193e-05,
"loss": 0.2426,
"step": 786
},
{
"epoch": 3.359275053304904,
"grad_norm": 0.3007594311300095,
"learning_rate": 1.1696634334008317e-05,
"loss": 0.2629,
"step": 787
},
{
"epoch": 3.363539445628998,
"grad_norm": 0.3098618482796862,
"learning_rate": 1.1642387474327505e-05,
"loss": 0.229,
"step": 788
},
{
"epoch": 3.3678038379530917,
"grad_norm": 0.2933436606084351,
"learning_rate": 1.1588215006420374e-05,
"loss": 0.2526,
"step": 789
},
{
"epoch": 3.3720682302771854,
"grad_norm": 0.2716612613001274,
"learning_rate": 1.1534117412480366e-05,
"loss": 0.2185,
"step": 790
},
{
"epoch": 3.3763326226012795,
"grad_norm": 0.3085235399779269,
"learning_rate": 1.1480095174034464e-05,
"loss": 0.2574,
"step": 791
},
{
"epoch": 3.3805970149253732,
"grad_norm": 0.28565152972713814,
"learning_rate": 1.1426148771938915e-05,
"loss": 0.2276,
"step": 792
},
{
"epoch": 3.384861407249467,
"grad_norm": 0.30010302424063223,
"learning_rate": 1.1372278686374935e-05,
"loss": 0.2564,
"step": 793
},
{
"epoch": 3.3891257995735606,
"grad_norm": 0.30330171467680656,
"learning_rate": 1.1318485396844427e-05,
"loss": 0.2231,
"step": 794
},
{
"epoch": 3.3933901918976543,
"grad_norm": 0.31861742745133986,
"learning_rate": 1.1264769382165748e-05,
"loss": 0.2343,
"step": 795
},
{
"epoch": 3.3976545842217485,
"grad_norm": 0.30438593980087136,
"learning_rate": 1.1211131120469413e-05,
"loss": 0.2446,
"step": 796
},
{
"epoch": 3.401918976545842,
"grad_norm": 0.28813768071806006,
"learning_rate": 1.1157571089193856e-05,
"loss": 0.2242,
"step": 797
},
{
"epoch": 3.406183368869936,
"grad_norm": 0.30606850192411184,
"learning_rate": 1.110408976508118e-05,
"loss": 0.2552,
"step": 798
},
{
"epoch": 3.41044776119403,
"grad_norm": 0.3145029450607833,
"learning_rate": 1.105068762417289e-05,
"loss": 0.2496,
"step": 799
},
{
"epoch": 3.4147121535181237,
"grad_norm": 0.2924588998467964,
"learning_rate": 1.0997365141805697e-05,
"loss": 0.2464,
"step": 800
},
{
"epoch": 3.4189765458422174,
"grad_norm": 0.30443844783069873,
"learning_rate": 1.094412279260726e-05,
"loss": 0.2395,
"step": 801
},
{
"epoch": 3.423240938166311,
"grad_norm": 0.3123338581477662,
"learning_rate": 1.0890961050491952e-05,
"loss": 0.2351,
"step": 802
},
{
"epoch": 3.4275053304904053,
"grad_norm": 0.2964017517166986,
"learning_rate": 1.0837880388656687e-05,
"loss": 0.2416,
"step": 803
},
{
"epoch": 3.431769722814499,
"grad_norm": 0.3281476512317521,
"learning_rate": 1.0784881279576635e-05,
"loss": 0.2383,
"step": 804
},
{
"epoch": 3.4360341151385927,
"grad_norm": 0.31829793297554937,
"learning_rate": 1.073196419500109e-05,
"loss": 0.2478,
"step": 805
},
{
"epoch": 3.4402985074626864,
"grad_norm": 0.32602829624699914,
"learning_rate": 1.067912960594923e-05,
"loss": 0.2241,
"step": 806
},
{
"epoch": 3.4445628997867805,
"grad_norm": 0.3191426788748806,
"learning_rate": 1.0626377982705929e-05,
"loss": 0.2418,
"step": 807
},
{
"epoch": 3.4488272921108742,
"grad_norm": 0.3021368983804344,
"learning_rate": 1.0573709794817586e-05,
"loss": 0.2459,
"step": 808
},
{
"epoch": 3.453091684434968,
"grad_norm": 0.3307143438925763,
"learning_rate": 1.0521125511087927e-05,
"loss": 0.2456,
"step": 809
},
{
"epoch": 3.4573560767590616,
"grad_norm": 0.3175944122781918,
"learning_rate": 1.0468625599573842e-05,
"loss": 0.2478,
"step": 810
},
{
"epoch": 3.461620469083156,
"grad_norm": 0.28439054844349576,
"learning_rate": 1.0416210527581221e-05,
"loss": 0.2457,
"step": 811
},
{
"epoch": 3.4658848614072495,
"grad_norm": 0.33296617403496936,
"learning_rate": 1.0363880761660786e-05,
"loss": 0.2438,
"step": 812
},
{
"epoch": 3.470149253731343,
"grad_norm": 0.29277732354416547,
"learning_rate": 1.0311636767603952e-05,
"loss": 0.2244,
"step": 813
},
{
"epoch": 3.474413646055437,
"grad_norm": 0.28674974708683865,
"learning_rate": 1.025947901043865e-05,
"loss": 0.2279,
"step": 814
},
{
"epoch": 3.478678038379531,
"grad_norm": 0.3469212992340864,
"learning_rate": 1.020740795442523e-05,
"loss": 0.2564,
"step": 815
},
{
"epoch": 3.4829424307036247,
"grad_norm": 0.30741933996359333,
"learning_rate": 1.0155424063052306e-05,
"loss": 0.246,
"step": 816
},
{
"epoch": 3.4872068230277184,
"grad_norm": 0.2985597325581848,
"learning_rate": 1.0103527799032627e-05,
"loss": 0.2449,
"step": 817
},
{
"epoch": 3.4914712153518126,
"grad_norm": 0.32011526265805135,
"learning_rate": 1.0051719624298962e-05,
"loss": 0.233,
"step": 818
},
{
"epoch": 3.4957356076759063,
"grad_norm": 0.30558408877185367,
"learning_rate": 1.0000000000000006e-05,
"loss": 0.226,
"step": 819
},
{
"epoch": 3.5,
"grad_norm": 0.31726146235825886,
"learning_rate": 9.948369386496231e-06,
"loss": 0.243,
"step": 820
},
{
"epoch": 3.5042643923240937,
"grad_norm": 0.311677236974838,
"learning_rate": 9.89682824335584e-06,
"loss": 0.2547,
"step": 821
},
{
"epoch": 3.5085287846481874,
"grad_norm": 0.29139088680330627,
"learning_rate": 9.84537702935065e-06,
"loss": 0.2261,
"step": 822
},
{
"epoch": 3.5127931769722816,
"grad_norm": 0.33737575572615713,
"learning_rate": 9.794016202452003e-06,
"loss": 0.2431,
"step": 823
},
{
"epoch": 3.5170575692963753,
"grad_norm": 0.3141703384709648,
"learning_rate": 9.742746219826715e-06,
"loss": 0.251,
"step": 824
},
{
"epoch": 3.521321961620469,
"grad_norm": 0.2934897661263017,
"learning_rate": 9.691567537832964e-06,
"loss": 0.2494,
"step": 825
},
{
"epoch": 3.525586353944563,
"grad_norm": 0.32253152245304295,
"learning_rate": 9.640480612016272e-06,
"loss": 0.2679,
"step": 826
},
{
"epoch": 3.529850746268657,
"grad_norm": 0.26852755792702954,
"learning_rate": 9.589485897105431e-06,
"loss": 0.2129,
"step": 827
},
{
"epoch": 3.5341151385927505,
"grad_norm": 0.29815354851790776,
"learning_rate": 9.538583847008452e-06,
"loss": 0.238,
"step": 828
},
{
"epoch": 3.538379530916844,
"grad_norm": 0.30229937242138655,
"learning_rate": 9.487774914808536e-06,
"loss": 0.2302,
"step": 829
},
{
"epoch": 3.542643923240938,
"grad_norm": 0.3277955189869379,
"learning_rate": 9.437059552760018e-06,
"loss": 0.242,
"step": 830
},
{
"epoch": 3.546908315565032,
"grad_norm": 0.3166866159369122,
"learning_rate": 9.386438212284372e-06,
"loss": 0.2438,
"step": 831
},
{
"epoch": 3.5511727078891258,
"grad_norm": 0.2749474828352015,
"learning_rate": 9.33591134396618e-06,
"loss": 0.2152,
"step": 832
},
{
"epoch": 3.5554371002132195,
"grad_norm": 0.30041265733845113,
"learning_rate": 9.28547939754911e-06,
"loss": 0.2362,
"step": 833
},
{
"epoch": 3.5597014925373136,
"grad_norm": 0.3146999658258432,
"learning_rate": 9.235142821931928e-06,
"loss": 0.244,
"step": 834
},
{
"epoch": 3.5639658848614073,
"grad_norm": 0.3293166515443863,
"learning_rate": 9.1849020651645e-06,
"loss": 0.2585,
"step": 835
},
{
"epoch": 3.568230277185501,
"grad_norm": 0.2760295232594156,
"learning_rate": 9.134757574443793e-06,
"loss": 0.2258,
"step": 836
},
{
"epoch": 3.572494669509595,
"grad_norm": 0.30975186669845683,
"learning_rate": 9.084709796109907e-06,
"loss": 0.2515,
"step": 837
},
{
"epoch": 3.576759061833689,
"grad_norm": 0.33162696304477796,
"learning_rate": 9.034759175642099e-06,
"loss": 0.2595,
"step": 838
},
{
"epoch": 3.5810234541577826,
"grad_norm": 0.2883332393947174,
"learning_rate": 8.984906157654818e-06,
"loss": 0.205,
"step": 839
},
{
"epoch": 3.5852878464818763,
"grad_norm": 0.2978502889456044,
"learning_rate": 8.93515118589373e-06,
"loss": 0.2458,
"step": 840
},
{
"epoch": 3.58955223880597,
"grad_norm": 0.29408879165914026,
"learning_rate": 8.885494703231798e-06,
"loss": 0.2392,
"step": 841
},
{
"epoch": 3.593816631130064,
"grad_norm": 0.3249087183509099,
"learning_rate": 8.835937151665328e-06,
"loss": 0.257,
"step": 842
},
{
"epoch": 3.598081023454158,
"grad_norm": 0.32232731451375923,
"learning_rate": 8.786478972310023e-06,
"loss": 0.2469,
"step": 843
},
{
"epoch": 3.6023454157782515,
"grad_norm": 0.29034742011961323,
"learning_rate": 8.737120605397071e-06,
"loss": 0.2484,
"step": 844
},
{
"epoch": 3.6066098081023457,
"grad_norm": 0.28525812573618703,
"learning_rate": 8.687862490269232e-06,
"loss": 0.248,
"step": 845
},
{
"epoch": 3.6108742004264394,
"grad_norm": 0.2872684471511718,
"learning_rate": 8.638705065376887e-06,
"loss": 0.2351,
"step": 846
},
{
"epoch": 3.615138592750533,
"grad_norm": 0.30255280187752015,
"learning_rate": 8.589648768274192e-06,
"loss": 0.2377,
"step": 847
},
{
"epoch": 3.6194029850746268,
"grad_norm": 0.28771967192257747,
"learning_rate": 8.54069403561515e-06,
"loss": 0.2422,
"step": 848
},
{
"epoch": 3.6236673773987205,
"grad_norm": 0.31269556887708466,
"learning_rate": 8.491841303149728e-06,
"loss": 0.2727,
"step": 849
},
{
"epoch": 3.6279317697228146,
"grad_norm": 0.3013824576718503,
"learning_rate": 8.443091005719987e-06,
"loss": 0.2425,
"step": 850
},
{
"epoch": 3.6321961620469083,
"grad_norm": 0.2648872302755811,
"learning_rate": 8.394443577256192e-06,
"loss": 0.2167,
"step": 851
},
{
"epoch": 3.636460554371002,
"grad_norm": 0.2896543847103546,
"learning_rate": 8.345899450772975e-06,
"loss": 0.2321,
"step": 852
},
{
"epoch": 3.640724946695096,
"grad_norm": 0.2966430163917612,
"learning_rate": 8.297459058365472e-06,
"loss": 0.2536,
"step": 853
},
{
"epoch": 3.64498933901919,
"grad_norm": 0.29102715377273775,
"learning_rate": 8.249122831205458e-06,
"loss": 0.2352,
"step": 854
},
{
"epoch": 3.6492537313432836,
"grad_norm": 0.276871037645944,
"learning_rate": 8.200891199537549e-06,
"loss": 0.2314,
"step": 855
},
{
"epoch": 3.6535181236673773,
"grad_norm": 0.29679802277295453,
"learning_rate": 8.152764592675317e-06,
"loss": 0.2466,
"step": 856
},
{
"epoch": 3.657782515991471,
"grad_norm": 0.3288786101624257,
"learning_rate": 8.104743438997527e-06,
"loss": 0.2481,
"step": 857
},
{
"epoch": 3.662046908315565,
"grad_norm": 0.2810797169473959,
"learning_rate": 8.056828165944282e-06,
"loss": 0.2462,
"step": 858
},
{
"epoch": 3.666311300639659,
"grad_norm": 0.27585540738489867,
"learning_rate": 8.009019200013237e-06,
"loss": 0.2356,
"step": 859
},
{
"epoch": 3.6705756929637525,
"grad_norm": 0.32641357147184774,
"learning_rate": 7.961316966755806e-06,
"loss": 0.2456,
"step": 860
},
{
"epoch": 3.6748400852878467,
"grad_norm": 0.31910225006889814,
"learning_rate": 7.913721890773354e-06,
"loss": 0.2721,
"step": 861
},
{
"epoch": 3.6791044776119404,
"grad_norm": 0.3397767935316877,
"learning_rate": 7.866234395713441e-06,
"loss": 0.2709,
"step": 862
},
{
"epoch": 3.683368869936034,
"grad_norm": 0.296901908055256,
"learning_rate": 7.818854904266033e-06,
"loss": 0.2503,
"step": 863
},
{
"epoch": 3.6876332622601278,
"grad_norm": 0.2808208297289632,
"learning_rate": 7.771583838159756e-06,
"loss": 0.2399,
"step": 864
},
{
"epoch": 3.6918976545842215,
"grad_norm": 0.29198514248012775,
"learning_rate": 7.724421618158122e-06,
"loss": 0.2666,
"step": 865
},
{
"epoch": 3.6961620469083156,
"grad_norm": 0.29995463338764394,
"learning_rate": 7.677368664055815e-06,
"loss": 0.2396,
"step": 866
},
{
"epoch": 3.7004264392324093,
"grad_norm": 0.28578829975709263,
"learning_rate": 7.630425394674903e-06,
"loss": 0.257,
"step": 867
},
{
"epoch": 3.704690831556503,
"grad_norm": 0.28021430102270617,
"learning_rate": 7.583592227861167e-06,
"loss": 0.2236,
"step": 868
},
{
"epoch": 3.708955223880597,
"grad_norm": 0.3268601383355693,
"learning_rate": 7.536869580480351e-06,
"loss": 0.2458,
"step": 869
},
{
"epoch": 3.713219616204691,
"grad_norm": 0.290893866821192,
"learning_rate": 7.49025786841445e-06,
"loss": 0.2358,
"step": 870
},
{
"epoch": 3.7174840085287846,
"grad_norm": 0.2944701896730605,
"learning_rate": 7.443757506558033e-06,
"loss": 0.2386,
"step": 871
},
{
"epoch": 3.7217484008528787,
"grad_norm": 0.2986050086210331,
"learning_rate": 7.397368908814499e-06,
"loss": 0.2637,
"step": 872
},
{
"epoch": 3.7260127931769724,
"grad_norm": 0.289696846898496,
"learning_rate": 7.3510924880924575e-06,
"loss": 0.2266,
"step": 873
},
{
"epoch": 3.730277185501066,
"grad_norm": 0.3240013899409774,
"learning_rate": 7.3049286563020036e-06,
"loss": 0.2466,
"step": 874
},
{
"epoch": 3.73454157782516,
"grad_norm": 0.3055316273372297,
"learning_rate": 7.258877824351081e-06,
"loss": 0.2439,
"step": 875
},
{
"epoch": 3.7388059701492535,
"grad_norm": 0.2904274401241053,
"learning_rate": 7.212940402141808e-06,
"loss": 0.2157,
"step": 876
},
{
"epoch": 3.7430703624733477,
"grad_norm": 0.3054868031454664,
"learning_rate": 7.16711679856682e-06,
"loss": 0.2483,
"step": 877
},
{
"epoch": 3.7473347547974414,
"grad_norm": 0.2670625511172484,
"learning_rate": 7.121407421505666e-06,
"loss": 0.2206,
"step": 878
},
{
"epoch": 3.751599147121535,
"grad_norm": 0.2721294014835585,
"learning_rate": 7.075812677821145e-06,
"loss": 0.2514,
"step": 879
},
{
"epoch": 3.7558635394456292,
"grad_norm": 0.2808108535749494,
"learning_rate": 7.030332973355696e-06,
"loss": 0.2198,
"step": 880
},
{
"epoch": 3.760127931769723,
"grad_norm": 0.29486840541948817,
"learning_rate": 6.984968712927789e-06,
"loss": 0.2333,
"step": 881
},
{
"epoch": 3.7643923240938166,
"grad_norm": 0.2874118074599997,
"learning_rate": 6.939720300328303e-06,
"loss": 0.2518,
"step": 882
},
{
"epoch": 3.7686567164179103,
"grad_norm": 0.2683580455012857,
"learning_rate": 6.894588138316962e-06,
"loss": 0.2286,
"step": 883
},
{
"epoch": 3.772921108742004,
"grad_norm": 0.27089873290672944,
"learning_rate": 6.84957262861873e-06,
"loss": 0.2352,
"step": 884
},
{
"epoch": 3.777185501066098,
"grad_norm": 0.28794297921319983,
"learning_rate": 6.8046741719202385e-06,
"loss": 0.2435,
"step": 885
},
{
"epoch": 3.781449893390192,
"grad_norm": 0.29512771103869784,
"learning_rate": 6.7598931678662205e-06,
"loss": 0.2486,
"step": 886
},
{
"epoch": 3.7857142857142856,
"grad_norm": 0.27422515206444054,
"learning_rate": 6.7152300150559514e-06,
"loss": 0.2302,
"step": 887
},
{
"epoch": 3.7899786780383797,
"grad_norm": 0.28450358656356606,
"learning_rate": 6.67068511103971e-06,
"loss": 0.2641,
"step": 888
},
{
"epoch": 3.7942430703624734,
"grad_norm": 0.27481427773374834,
"learning_rate": 6.626258852315226e-06,
"loss": 0.2443,
"step": 889
},
{
"epoch": 3.798507462686567,
"grad_norm": 0.2924351460268186,
"learning_rate": 6.581951634324164e-06,
"loss": 0.2571,
"step": 890
},
{
"epoch": 3.802771855010661,
"grad_norm": 0.2790682450802608,
"learning_rate": 6.537763851448593e-06,
"loss": 0.23,
"step": 891
},
{
"epoch": 3.8070362473347545,
"grad_norm": 0.2930329051203839,
"learning_rate": 6.493695897007488e-06,
"loss": 0.2586,
"step": 892
},
{
"epoch": 3.8113006396588487,
"grad_norm": 0.28549744285559625,
"learning_rate": 6.4497481632532025e-06,
"loss": 0.2447,
"step": 893
},
{
"epoch": 3.8155650319829424,
"grad_norm": 0.29773280536775487,
"learning_rate": 6.4059210413680175e-06,
"loss": 0.2651,
"step": 894
},
{
"epoch": 3.819829424307036,
"grad_norm": 0.25471857385762836,
"learning_rate": 6.362214921460628e-06,
"loss": 0.2212,
"step": 895
},
{
"epoch": 3.8240938166311302,
"grad_norm": 0.2862555008696523,
"learning_rate": 6.318630192562685e-06,
"loss": 0.2438,
"step": 896
},
{
"epoch": 3.828358208955224,
"grad_norm": 0.2902843603482996,
"learning_rate": 6.275167242625331e-06,
"loss": 0.261,
"step": 897
},
{
"epoch": 3.8326226012793176,
"grad_norm": 0.2734916460539061,
"learning_rate": 6.231826458515728e-06,
"loss": 0.235,
"step": 898
},
{
"epoch": 3.836886993603412,
"grad_norm": 0.2959908993657831,
"learning_rate": 6.1886082260136486e-06,
"loss": 0.2674,
"step": 899
},
{
"epoch": 3.8411513859275055,
"grad_norm": 0.2629506747169864,
"learning_rate": 6.145512929808013e-06,
"loss": 0.2269,
"step": 900
},
{
"epoch": 3.845415778251599,
"grad_norm": 0.2796514994593026,
"learning_rate": 6.102540953493483e-06,
"loss": 0.2539,
"step": 901
},
{
"epoch": 3.849680170575693,
"grad_norm": 0.28247757622970854,
"learning_rate": 6.0596926795670445e-06,
"loss": 0.2505,
"step": 902
},
{
"epoch": 3.8539445628997866,
"grad_norm": 0.28546889184805163,
"learning_rate": 6.016968489424572e-06,
"loss": 0.239,
"step": 903
},
{
"epoch": 3.8582089552238807,
"grad_norm": 0.26614246481199944,
"learning_rate": 5.974368763357494e-06,
"loss": 0.2541,
"step": 904
},
{
"epoch": 3.8624733475479744,
"grad_norm": 0.270890457656894,
"learning_rate": 5.931893880549356e-06,
"loss": 0.2469,
"step": 905
},
{
"epoch": 3.866737739872068,
"grad_norm": 0.28586020360247877,
"learning_rate": 5.889544219072465e-06,
"loss": 0.2477,
"step": 906
},
{
"epoch": 3.8710021321961623,
"grad_norm": 0.2680248514296973,
"learning_rate": 5.847320155884535e-06,
"loss": 0.2302,
"step": 907
},
{
"epoch": 3.875266524520256,
"grad_norm": 0.25517931577869063,
"learning_rate": 5.805222066825296e-06,
"loss": 0.2215,
"step": 908
},
{
"epoch": 3.8795309168443497,
"grad_norm": 0.27280219353131735,
"learning_rate": 5.7632503266131925e-06,
"loss": 0.2317,
"step": 909
},
{
"epoch": 3.8837953091684434,
"grad_norm": 0.2716445422818712,
"learning_rate": 5.721405308842023e-06,
"loss": 0.2579,
"step": 910
},
{
"epoch": 3.888059701492537,
"grad_norm": 0.27313578866815935,
"learning_rate": 5.679687385977617e-06,
"loss": 0.2282,
"step": 911
},
{
"epoch": 3.8923240938166312,
"grad_norm": 0.2797256005452073,
"learning_rate": 5.638096929354522e-06,
"loss": 0.2157,
"step": 912
},
{
"epoch": 3.896588486140725,
"grad_norm": 0.2892205252889863,
"learning_rate": 5.596634309172704e-06,
"loss": 0.2571,
"step": 913
},
{
"epoch": 3.9008528784648187,
"grad_norm": 0.2701601040294437,
"learning_rate": 5.555299894494237e-06,
"loss": 0.2395,
"step": 914
},
{
"epoch": 3.905117270788913,
"grad_norm": 0.2601323072881841,
"learning_rate": 5.514094053240035e-06,
"loss": 0.2149,
"step": 915
},
{
"epoch": 3.9093816631130065,
"grad_norm": 0.32348241733730365,
"learning_rate": 5.47301715218657e-06,
"loss": 0.2475,
"step": 916
},
{
"epoch": 3.9136460554371,
"grad_norm": 0.2805297373975744,
"learning_rate": 5.432069556962602e-06,
"loss": 0.2275,
"step": 917
},
{
"epoch": 3.917910447761194,
"grad_norm": 0.2886640845993047,
"learning_rate": 5.39125163204594e-06,
"loss": 0.2384,
"step": 918
},
{
"epoch": 3.9221748400852876,
"grad_norm": 0.28537996894175865,
"learning_rate": 5.350563740760164e-06,
"loss": 0.2397,
"step": 919
},
{
"epoch": 3.9264392324093818,
"grad_norm": 0.3035362606109377,
"learning_rate": 5.31000624527144e-06,
"loss": 0.2383,
"step": 920
},
{
"epoch": 3.9307036247334755,
"grad_norm": 0.2998584944750757,
"learning_rate": 5.269579506585259e-06,
"loss": 0.2456,
"step": 921
},
{
"epoch": 3.934968017057569,
"grad_norm": 0.2843194883548224,
"learning_rate": 5.229283884543235e-06,
"loss": 0.2397,
"step": 922
},
{
"epoch": 3.9392324093816633,
"grad_norm": 0.27890127535725073,
"learning_rate": 5.189119737819912e-06,
"loss": 0.2538,
"step": 923
},
{
"epoch": 3.943496801705757,
"grad_norm": 0.2650375563185337,
"learning_rate": 5.149087423919541e-06,
"loss": 0.2119,
"step": 924
},
{
"epoch": 3.9477611940298507,
"grad_norm": 0.284810452643455,
"learning_rate": 5.109187299172938e-06,
"loss": 0.2668,
"step": 925
},
{
"epoch": 3.9520255863539444,
"grad_norm": 0.27726445813021233,
"learning_rate": 5.069419718734283e-06,
"loss": 0.241,
"step": 926
},
{
"epoch": 3.956289978678038,
"grad_norm": 0.28755904463832055,
"learning_rate": 5.029785036577976e-06,
"loss": 0.2693,
"step": 927
},
{
"epoch": 3.9605543710021323,
"grad_norm": 0.25514683912114794,
"learning_rate": 4.99028360549548e-06,
"loss": 0.1971,
"step": 928
},
{
"epoch": 3.964818763326226,
"grad_norm": 0.2956950707929946,
"learning_rate": 4.9509157770921625e-06,
"loss": 0.2491,
"step": 929
},
{
"epoch": 3.9690831556503197,
"grad_norm": 0.2767014770132041,
"learning_rate": 4.911681901784198e-06,
"loss": 0.2441,
"step": 930
},
{
"epoch": 3.973347547974414,
"grad_norm": 0.28218722265046,
"learning_rate": 4.87258232879543e-06,
"loss": 0.2531,
"step": 931
},
{
"epoch": 3.9776119402985075,
"grad_norm": 0.2724674673441032,
"learning_rate": 4.83361740615427e-06,
"loss": 0.2273,
"step": 932
},
{
"epoch": 3.981876332622601,
"grad_norm": 0.2738688947167063,
"learning_rate": 4.794787480690597e-06,
"loss": 0.2465,
"step": 933
},
{
"epoch": 3.9861407249466954,
"grad_norm": 0.27288919048483945,
"learning_rate": 4.7560928980326535e-06,
"loss": 0.2411,
"step": 934
},
{
"epoch": 3.990405117270789,
"grad_norm": 0.26906162947359014,
"learning_rate": 4.717534002604005e-06,
"loss": 0.2293,
"step": 935
},
{
"epoch": 3.9946695095948828,
"grad_norm": 0.2618466173879373,
"learning_rate": 4.679111137620442e-06,
"loss": 0.2064,
"step": 936
},
{
"epoch": 3.9989339019189765,
"grad_norm": 0.6242272523532433,
"learning_rate": 4.640824645086943e-06,
"loss": 0.4147,
"step": 937
},
{
"epoch": 4.004264392324094,
"grad_norm": 0.502124542538278,
"learning_rate": 4.6026748657946226e-06,
"loss": 0.182,
"step": 938
},
{
"epoch": 4.008528784648187,
"grad_norm": 0.40214808756394727,
"learning_rate": 4.5646621393177e-06,
"loss": 0.1878,
"step": 939
},
{
"epoch": 4.0127931769722816,
"grad_norm": 0.2964884308202411,
"learning_rate": 4.5267868040104765e-06,
"loss": 0.1827,
"step": 940
},
{
"epoch": 4.017057569296376,
"grad_norm": 0.379223019030444,
"learning_rate": 4.489049197004323e-06,
"loss": 0.1759,
"step": 941
},
{
"epoch": 4.021321961620469,
"grad_norm": 0.5008060151918551,
"learning_rate": 4.451449654204685e-06,
"loss": 0.1906,
"step": 942
},
{
"epoch": 4.025586353944563,
"grad_norm": 0.45022251052851464,
"learning_rate": 4.413988510288072e-06,
"loss": 0.1764,
"step": 943
},
{
"epoch": 4.029850746268656,
"grad_norm": 0.3183418187342008,
"learning_rate": 4.376666098699112e-06,
"loss": 0.177,
"step": 944
},
{
"epoch": 4.0341151385927505,
"grad_norm": 0.3522770365054266,
"learning_rate": 4.339482751647557e-06,
"loss": 0.1705,
"step": 945
},
{
"epoch": 4.038379530916845,
"grad_norm": 0.4263503005592876,
"learning_rate": 4.302438800105335e-06,
"loss": 0.188,
"step": 946
},
{
"epoch": 4.042643923240938,
"grad_norm": 0.3946522385281874,
"learning_rate": 4.265534573803607e-06,
"loss": 0.1986,
"step": 947
},
{
"epoch": 4.046908315565032,
"grad_norm": 0.31039206261225744,
"learning_rate": 4.228770401229824e-06,
"loss": 0.1807,
"step": 948
},
{
"epoch": 4.051172707889126,
"grad_norm": 0.2698934513861876,
"learning_rate": 4.1921466096248164e-06,
"loss": 0.1644,
"step": 949
},
{
"epoch": 4.0554371002132195,
"grad_norm": 0.3191366565183697,
"learning_rate": 4.155663524979854e-06,
"loss": 0.1818,
"step": 950
},
{
"epoch": 4.059701492537314,
"grad_norm": 0.3494685301529987,
"learning_rate": 4.119321472033779e-06,
"loss": 0.1853,
"step": 951
},
{
"epoch": 4.063965884861407,
"grad_norm": 0.3691215369886063,
"learning_rate": 4.083120774270095e-06,
"loss": 0.1892,
"step": 952
},
{
"epoch": 4.068230277185501,
"grad_norm": 0.30967940037401875,
"learning_rate": 4.04706175391409e-06,
"loss": 0.1862,
"step": 953
},
{
"epoch": 4.072494669509595,
"grad_norm": 0.28470587344113735,
"learning_rate": 4.011144731929981e-06,
"loss": 0.1635,
"step": 954
},
{
"epoch": 4.076759061833688,
"grad_norm": 0.28866644852025736,
"learning_rate": 3.975370028018022e-06,
"loss": 0.195,
"step": 955
},
{
"epoch": 4.081023454157783,
"grad_norm": 0.3005010899793622,
"learning_rate": 3.9397379606117045e-06,
"loss": 0.2031,
"step": 956
},
{
"epoch": 4.085287846481877,
"grad_norm": 0.3223639228910986,
"learning_rate": 3.904248846874894e-06,
"loss": 0.2033,
"step": 957
},
{
"epoch": 4.08955223880597,
"grad_norm": 0.2830213529504144,
"learning_rate": 3.868903002699014e-06,
"loss": 0.1836,
"step": 958
},
{
"epoch": 4.093816631130064,
"grad_norm": 0.27857117999872993,
"learning_rate": 3.833700742700237e-06,
"loss": 0.1682,
"step": 959
},
{
"epoch": 4.098081023454157,
"grad_norm": 0.28521159706834004,
"learning_rate": 3.7986423802166705e-06,
"loss": 0.1829,
"step": 960
},
{
"epoch": 4.1023454157782515,
"grad_norm": 0.31579283722941986,
"learning_rate": 3.7637282273055918e-06,
"loss": 0.1881,
"step": 961
},
{
"epoch": 4.106609808102346,
"grad_norm": 0.28461700269519835,
"learning_rate": 3.7289585947406504e-06,
"loss": 0.1681,
"step": 962
},
{
"epoch": 4.110874200426439,
"grad_norm": 0.2946870658948828,
"learning_rate": 3.694333792009115e-06,
"loss": 0.1707,
"step": 963
},
{
"epoch": 4.115138592750533,
"grad_norm": 0.26948058780522965,
"learning_rate": 3.6598541273091035e-06,
"loss": 0.2017,
"step": 964
},
{
"epoch": 4.119402985074627,
"grad_norm": 0.26347028329932537,
"learning_rate": 3.6255199075468595e-06,
"loss": 0.1742,
"step": 965
},
{
"epoch": 4.1236673773987205,
"grad_norm": 0.2655975197937126,
"learning_rate": 3.5913314383339937e-06,
"loss": 0.1903,
"step": 966
},
{
"epoch": 4.127931769722815,
"grad_norm": 0.28973082824861224,
"learning_rate": 3.5572890239847934e-06,
"loss": 0.19,
"step": 967
},
{
"epoch": 4.132196162046908,
"grad_norm": 0.28373915001437056,
"learning_rate": 3.5233929675134993e-06,
"loss": 0.1668,
"step": 968
},
{
"epoch": 4.136460554371002,
"grad_norm": 0.27968718279785926,
"learning_rate": 3.4896435706316e-06,
"loss": 0.1792,
"step": 969
},
{
"epoch": 4.140724946695096,
"grad_norm": 0.2684912141931692,
"learning_rate": 3.456041133745163e-06,
"loss": 0.1859,
"step": 970
},
{
"epoch": 4.144989339019189,
"grad_norm": 0.25671222298384944,
"learning_rate": 3.4225859559521536e-06,
"loss": 0.1943,
"step": 971
},
{
"epoch": 4.149253731343284,
"grad_norm": 0.2941742818421993,
"learning_rate": 3.3892783350397675e-06,
"loss": 0.1804,
"step": 972
},
{
"epoch": 4.153518123667378,
"grad_norm": 0.26842044384478514,
"learning_rate": 3.356118567481785e-06,
"loss": 0.1875,
"step": 973
},
{
"epoch": 4.157782515991471,
"grad_norm": 0.27257574650490113,
"learning_rate": 3.3231069484359367e-06,
"loss": 0.1614,
"step": 974
},
{
"epoch": 4.162046908315565,
"grad_norm": 0.26576435827231454,
"learning_rate": 3.290243771741275e-06,
"loss": 0.2,
"step": 975
},
{
"epoch": 4.166311300639659,
"grad_norm": 0.28364887161883395,
"learning_rate": 3.2575293299155343e-06,
"loss": 0.177,
"step": 976
},
{
"epoch": 4.1705756929637525,
"grad_norm": 0.2648805323845748,
"learning_rate": 3.2249639141525724e-06,
"loss": 0.1761,
"step": 977
},
{
"epoch": 4.174840085287847,
"grad_norm": 0.2772188319946492,
"learning_rate": 3.1925478143197418e-06,
"loss": 0.1652,
"step": 978
},
{
"epoch": 4.17910447761194,
"grad_norm": 0.24545620530472215,
"learning_rate": 3.160281318955325e-06,
"loss": 0.182,
"step": 979
},
{
"epoch": 4.183368869936034,
"grad_norm": 0.25783169525362326,
"learning_rate": 3.1281647152659687e-06,
"loss": 0.1783,
"step": 980
},
{
"epoch": 4.187633262260128,
"grad_norm": 0.26773304598461506,
"learning_rate": 3.0961982891241083e-06,
"loss": 0.1845,
"step": 981
},
{
"epoch": 4.1918976545842215,
"grad_norm": 0.2634763485660847,
"learning_rate": 3.0643823250654447e-06,
"loss": 0.1883,
"step": 982
},
{
"epoch": 4.196162046908316,
"grad_norm": 0.27160922729899195,
"learning_rate": 3.032717106286409e-06,
"loss": 0.1804,
"step": 983
},
{
"epoch": 4.20042643923241,
"grad_norm": 0.28052463542160555,
"learning_rate": 3.001202914641628e-06,
"loss": 0.1876,
"step": 984
},
{
"epoch": 4.204690831556503,
"grad_norm": 0.27189232529742935,
"learning_rate": 2.9698400306414353e-06,
"loss": 0.1847,
"step": 985
},
{
"epoch": 4.208955223880597,
"grad_norm": 0.261299366875251,
"learning_rate": 2.9386287334493426e-06,
"loss": 0.1674,
"step": 986
},
{
"epoch": 4.21321961620469,
"grad_norm": 0.2685084307301793,
"learning_rate": 2.907569300879596e-06,
"loss": 0.1815,
"step": 987
},
{
"epoch": 4.217484008528785,
"grad_norm": 0.2534128060130963,
"learning_rate": 2.876662009394673e-06,
"loss": 0.1924,
"step": 988
},
{
"epoch": 4.221748400852879,
"grad_norm": 0.25673713632277023,
"learning_rate": 2.8459071341028323e-06,
"loss": 0.1912,
"step": 989
},
{
"epoch": 4.226012793176972,
"grad_norm": 0.2468883201944162,
"learning_rate": 2.815304948755664e-06,
"loss": 0.1779,
"step": 990
},
{
"epoch": 4.230277185501066,
"grad_norm": 0.25408696538411385,
"learning_rate": 2.784855725745661e-06,
"loss": 0.1872,
"step": 991
},
{
"epoch": 4.23454157782516,
"grad_norm": 0.2781469282460988,
"learning_rate": 2.7545597361037657e-06,
"loss": 0.1934,
"step": 992
},
{
"epoch": 4.2388059701492535,
"grad_norm": 0.2615576306598456,
"learning_rate": 2.7244172494969978e-06,
"loss": 0.1935,
"step": 993
},
{
"epoch": 4.243070362473348,
"grad_norm": 0.2639612644746222,
"learning_rate": 2.6944285342260277e-06,
"loss": 0.1746,
"step": 994
},
{
"epoch": 4.247334754797441,
"grad_norm": 0.2490668697614864,
"learning_rate": 2.6645938572227946e-06,
"loss": 0.1745,
"step": 995
},
{
"epoch": 4.251599147121535,
"grad_norm": 0.25084455227336827,
"learning_rate": 2.6349134840481294e-06,
"loss": 0.1826,
"step": 996
},
{
"epoch": 4.255863539445629,
"grad_norm": 0.25951044293340914,
"learning_rate": 2.6053876788893952e-06,
"loss": 0.1765,
"step": 997
},
{
"epoch": 4.2601279317697225,
"grad_norm": 0.2616833813932719,
"learning_rate": 2.5760167045581308e-06,
"loss": 0.1779,
"step": 998
},
{
"epoch": 4.264392324093817,
"grad_norm": 0.2683034823812298,
"learning_rate": 2.546800822487714e-06,
"loss": 0.178,
"step": 999
},
{
"epoch": 4.268656716417911,
"grad_norm": 0.25617069908219275,
"learning_rate": 2.5177402927310344e-06,
"loss": 0.1685,
"step": 1000
},
{
"epoch": 4.272921108742004,
"grad_norm": 0.2625568533886325,
"learning_rate": 2.488835373958185e-06,
"loss": 0.183,
"step": 1001
},
{
"epoch": 4.277185501066098,
"grad_norm": 0.2526196797026667,
"learning_rate": 2.4600863234541338e-06,
"loss": 0.1788,
"step": 1002
},
{
"epoch": 4.281449893390192,
"grad_norm": 0.2717109859468861,
"learning_rate": 2.4314933971164734e-06,
"loss": 0.1838,
"step": 1003
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.2596571713184107,
"learning_rate": 2.4030568494531135e-06,
"loss": 0.1744,
"step": 1004
},
{
"epoch": 4.28997867803838,
"grad_norm": 0.25389529145774403,
"learning_rate": 2.374776933580025e-06,
"loss": 0.1912,
"step": 1005
},
{
"epoch": 4.294243070362473,
"grad_norm": 0.24747405968954783,
"learning_rate": 2.3466539012189913e-06,
"loss": 0.1684,
"step": 1006
},
{
"epoch": 4.298507462686567,
"grad_norm": 0.26068317199815344,
"learning_rate": 2.318688002695355e-06,
"loss": 0.184,
"step": 1007
},
{
"epoch": 4.302771855010661,
"grad_norm": 0.2476855542290121,
"learning_rate": 2.2908794869358044e-06,
"loss": 0.1831,
"step": 1008
},
{
"epoch": 4.3070362473347545,
"grad_norm": 0.2541200698145374,
"learning_rate": 2.2632286014661477e-06,
"loss": 0.1904,
"step": 1009
},
{
"epoch": 4.311300639658849,
"grad_norm": 0.2504818694519344,
"learning_rate": 2.2357355924091207e-06,
"loss": 0.1588,
"step": 1010
},
{
"epoch": 4.315565031982943,
"grad_norm": 0.25158539609157676,
"learning_rate": 2.2084007044821764e-06,
"loss": 0.1805,
"step": 1011
},
{
"epoch": 4.319829424307036,
"grad_norm": 0.2550618363663431,
"learning_rate": 2.181224180995336e-06,
"loss": 0.1896,
"step": 1012
},
{
"epoch": 4.32409381663113,
"grad_norm": 0.2547599198922931,
"learning_rate": 2.154206263848986e-06,
"loss": 0.1691,
"step": 1013
},
{
"epoch": 4.3283582089552235,
"grad_norm": 0.2529811695628979,
"learning_rate": 2.127347193531757e-06,
"loss": 0.197,
"step": 1014
},
{
"epoch": 4.332622601279318,
"grad_norm": 0.26719444732853537,
"learning_rate": 2.1006472091183715e-06,
"loss": 0.1767,
"step": 1015
},
{
"epoch": 4.336886993603412,
"grad_norm": 0.25726098495581406,
"learning_rate": 2.074106548267516e-06,
"loss": 0.1745,
"step": 1016
},
{
"epoch": 4.341151385927505,
"grad_norm": 0.2377016385868149,
"learning_rate": 2.0477254472197237e-06,
"loss": 0.1743,
"step": 1017
},
{
"epoch": 4.345415778251599,
"grad_norm": 0.24345600501457476,
"learning_rate": 2.021504140795265e-06,
"loss": 0.1734,
"step": 1018
},
{
"epoch": 4.349680170575693,
"grad_norm": 0.2461514291416044,
"learning_rate": 1.995442862392081e-06,
"loss": 0.1975,
"step": 1019
},
{
"epoch": 4.353944562899787,
"grad_norm": 0.24447636461188885,
"learning_rate": 1.96954184398368e-06,
"loss": 0.1745,
"step": 1020
},
{
"epoch": 4.358208955223881,
"grad_norm": 0.2406706927340542,
"learning_rate": 1.943801316117089e-06,
"loss": 0.1663,
"step": 1021
},
{
"epoch": 4.362473347547974,
"grad_norm": 0.24261079701896185,
"learning_rate": 1.918221507910789e-06,
"loss": 0.1756,
"step": 1022
},
{
"epoch": 4.366737739872068,
"grad_norm": 0.2560728213012035,
"learning_rate": 1.8928026470526917e-06,
"loss": 0.1909,
"step": 1023
},
{
"epoch": 4.371002132196162,
"grad_norm": 0.24951492778723952,
"learning_rate": 1.8675449597980978e-06,
"loss": 0.1906,
"step": 1024
},
{
"epoch": 4.3752665245202556,
"grad_norm": 0.2476830068344565,
"learning_rate": 1.842448670967687e-06,
"loss": 0.166,
"step": 1025
},
{
"epoch": 4.37953091684435,
"grad_norm": 0.25954959096851477,
"learning_rate": 1.817514003945524e-06,
"loss": 0.1694,
"step": 1026
},
{
"epoch": 4.383795309168444,
"grad_norm": 0.26570027932355017,
"learning_rate": 1.792741180677069e-06,
"loss": 0.2076,
"step": 1027
},
{
"epoch": 4.388059701492537,
"grad_norm": 0.25639086209798745,
"learning_rate": 1.76813042166718e-06,
"loss": 0.1793,
"step": 1028
},
{
"epoch": 4.392324093816631,
"grad_norm": 0.2564487505775299,
"learning_rate": 1.743681945978184e-06,
"loss": 0.1769,
"step": 1029
},
{
"epoch": 4.396588486140725,
"grad_norm": 0.25370708499935873,
"learning_rate": 1.7193959712279106e-06,
"loss": 0.1768,
"step": 1030
},
{
"epoch": 4.400852878464819,
"grad_norm": 0.24834886176780574,
"learning_rate": 1.6952727135877501e-06,
"loss": 0.1813,
"step": 1031
},
{
"epoch": 4.405117270788913,
"grad_norm": 0.26274943101751796,
"learning_rate": 1.6713123877807413e-06,
"loss": 0.1729,
"step": 1032
},
{
"epoch": 4.409381663113006,
"grad_norm": 0.24031929989576914,
"learning_rate": 1.6475152070796396e-06,
"loss": 0.184,
"step": 1033
},
{
"epoch": 4.4136460554371,
"grad_norm": 0.2559954382971304,
"learning_rate": 1.6238813833050504e-06,
"loss": 0.1836,
"step": 1034
},
{
"epoch": 4.417910447761194,
"grad_norm": 0.24487155650329637,
"learning_rate": 1.6004111268235156e-06,
"loss": 0.2054,
"step": 1035
},
{
"epoch": 4.422174840085288,
"grad_norm": 0.2420735342734897,
"learning_rate": 1.5771046465456574e-06,
"loss": 0.1739,
"step": 1036
},
{
"epoch": 4.426439232409382,
"grad_norm": 0.2608216346583349,
"learning_rate": 1.5539621499243064e-06,
"loss": 0.182,
"step": 1037
},
{
"epoch": 4.430703624733475,
"grad_norm": 0.2482170636749505,
"learning_rate": 1.5309838429526714e-06,
"loss": 0.2013,
"step": 1038
},
{
"epoch": 4.434968017057569,
"grad_norm": 0.25334395308346525,
"learning_rate": 1.5081699301624819e-06,
"loss": 0.1771,
"step": 1039
},
{
"epoch": 4.439232409381663,
"grad_norm": 0.2564039736071258,
"learning_rate": 1.4855206146221934e-06,
"loss": 0.1826,
"step": 1040
},
{
"epoch": 4.443496801705757,
"grad_norm": 0.2604551013420376,
"learning_rate": 1.4630360979351644e-06,
"loss": 0.1748,
"step": 1041
},
{
"epoch": 4.447761194029851,
"grad_norm": 0.2503977756454165,
"learning_rate": 1.4407165802378664e-06,
"loss": 0.1891,
"step": 1042
},
{
"epoch": 4.452025586353945,
"grad_norm": 0.2743350520560739,
"learning_rate": 1.4185622601981042e-06,
"loss": 0.1844,
"step": 1043
},
{
"epoch": 4.456289978678038,
"grad_norm": 0.2692092257511118,
"learning_rate": 1.396573335013236e-06,
"loss": 0.186,
"step": 1044
},
{
"epoch": 4.460554371002132,
"grad_norm": 0.3262914630858915,
"learning_rate": 1.374750000408438e-06,
"loss": 0.1894,
"step": 1045
},
{
"epoch": 4.464818763326226,
"grad_norm": 0.25490515346975273,
"learning_rate": 1.353092450634943e-06,
"loss": 0.1781,
"step": 1046
},
{
"epoch": 4.46908315565032,
"grad_norm": 0.25118910970659714,
"learning_rate": 1.3316008784683265e-06,
"loss": 0.1955,
"step": 1047
},
{
"epoch": 4.473347547974414,
"grad_norm": 0.24598518384836726,
"learning_rate": 1.3102754752067792e-06,
"loss": 0.1787,
"step": 1048
},
{
"epoch": 4.477611940298507,
"grad_norm": 0.2608257108213762,
"learning_rate": 1.2891164306694148e-06,
"loss": 0.185,
"step": 1049
},
{
"epoch": 4.481876332622601,
"grad_norm": 0.2576580996322884,
"learning_rate": 1.2681239331945695e-06,
"loss": 0.1917,
"step": 1050
},
{
"epoch": 4.486140724946695,
"grad_norm": 0.2504942501895405,
"learning_rate": 1.2472981696381315e-06,
"loss": 0.1828,
"step": 1051
},
{
"epoch": 4.490405117270789,
"grad_norm": 0.2536034604981403,
"learning_rate": 1.2266393253718812e-06,
"loss": 0.1951,
"step": 1052
},
{
"epoch": 4.494669509594883,
"grad_norm": 0.2525231036260257,
"learning_rate": 1.2061475842818337e-06,
"loss": 0.1762,
"step": 1053
},
{
"epoch": 4.498933901918977,
"grad_norm": 0.25440738093351306,
"learning_rate": 1.185823128766601e-06,
"loss": 0.1809,
"step": 1054
},
{
"epoch": 4.50319829424307,
"grad_norm": 0.24386942032621886,
"learning_rate": 1.1656661397357815e-06,
"loss": 0.1674,
"step": 1055
},
{
"epoch": 4.507462686567164,
"grad_norm": 0.2415595398325273,
"learning_rate": 1.1456767966083393e-06,
"loss": 0.1723,
"step": 1056
},
{
"epoch": 4.5117270788912585,
"grad_norm": 0.2511080328869282,
"learning_rate": 1.1258552773110031e-06,
"loss": 0.1729,
"step": 1057
},
{
"epoch": 4.515991471215352,
"grad_norm": 0.2502308872486097,
"learning_rate": 1.106201758276697e-06,
"loss": 0.1752,
"step": 1058
},
{
"epoch": 4.520255863539446,
"grad_norm": 0.2552077136895359,
"learning_rate": 1.086716414442952e-06,
"loss": 0.1796,
"step": 1059
},
{
"epoch": 4.524520255863539,
"grad_norm": 0.23941103130674032,
"learning_rate": 1.0673994192503633e-06,
"loss": 0.169,
"step": 1060
},
{
"epoch": 4.528784648187633,
"grad_norm": 0.2568336996961339,
"learning_rate": 1.048250944641045e-06,
"loss": 0.1826,
"step": 1061
},
{
"epoch": 4.533049040511727,
"grad_norm": 0.25307944722264636,
"learning_rate": 1.0292711610570904e-06,
"loss": 0.1892,
"step": 1062
},
{
"epoch": 4.537313432835821,
"grad_norm": 0.25965249103962207,
"learning_rate": 1.0104602374390593e-06,
"loss": 0.1703,
"step": 1063
},
{
"epoch": 4.541577825159915,
"grad_norm": 0.24426354335989117,
"learning_rate": 9.918183412244863e-07,
"loss": 0.1771,
"step": 1064
},
{
"epoch": 4.545842217484008,
"grad_norm": 0.2645766856661182,
"learning_rate": 9.733456383463658e-07,
"loss": 0.1809,
"step": 1065
},
{
"epoch": 4.550106609808102,
"grad_norm": 0.25999228059596646,
"learning_rate": 9.550422932316938e-07,
"loss": 0.1882,
"step": 1066
},
{
"epoch": 4.554371002132196,
"grad_norm": 0.2588645606625913,
"learning_rate": 9.369084688000063e-07,
"loss": 0.2033,
"step": 1067
},
{
"epoch": 4.55863539445629,
"grad_norm": 0.24825190481868026,
"learning_rate": 9.189443264619102e-07,
"loss": 0.1724,
"step": 1068
},
{
"epoch": 4.562899786780384,
"grad_norm": 0.23615188533726575,
"learning_rate": 9.011500261176742e-07,
"loss": 0.1894,
"step": 1069
},
{
"epoch": 4.567164179104478,
"grad_norm": 0.2625288054723175,
"learning_rate": 8.835257261557695e-07,
"loss": 0.1835,
"step": 1070
},
{
"epoch": 4.571428571428571,
"grad_norm": 0.24898546589666998,
"learning_rate": 8.660715834514977e-07,
"loss": 0.1734,
"step": 1071
},
{
"epoch": 4.575692963752665,
"grad_norm": 0.24334105931233999,
"learning_rate": 8.487877533655698e-07,
"loss": 0.1796,
"step": 1072
},
{
"epoch": 4.5799573560767595,
"grad_norm": 0.2636363968555462,
"learning_rate": 8.316743897427315e-07,
"loss": 0.1705,
"step": 1073
},
{
"epoch": 4.584221748400853,
"grad_norm": 0.241712043228359,
"learning_rate": 8.147316449103959e-07,
"loss": 0.1795,
"step": 1074
},
{
"epoch": 4.588486140724947,
"grad_norm": 0.2416859186454114,
"learning_rate": 7.979596696772773e-07,
"loss": 0.1888,
"step": 1075
},
{
"epoch": 4.59275053304904,
"grad_norm": 0.2549025323127069,
"learning_rate": 7.813586133320661e-07,
"loss": 0.193,
"step": 1076
},
{
"epoch": 4.597014925373134,
"grad_norm": 0.2595086426061621,
"learning_rate": 7.649286236420806e-07,
"loss": 0.1924,
"step": 1077
},
{
"epoch": 4.601279317697228,
"grad_norm": 0.2517909149989371,
"learning_rate": 7.486698468519682e-07,
"loss": 0.184,
"step": 1078
},
{
"epoch": 4.605543710021322,
"grad_norm": 0.26019528159860256,
"learning_rate": 7.325824276823934e-07,
"loss": 0.175,
"step": 1079
},
{
"epoch": 4.609808102345416,
"grad_norm": 0.26605256455150367,
"learning_rate": 7.166665093287539e-07,
"loss": 0.1747,
"step": 1080
},
{
"epoch": 4.61407249466951,
"grad_norm": 0.2830200475494057,
"learning_rate": 7.009222334599019e-07,
"loss": 0.1788,
"step": 1081
},
{
"epoch": 4.618336886993603,
"grad_norm": 0.2467861629974839,
"learning_rate": 6.853497402168896e-07,
"loss": 0.1799,
"step": 1082
},
{
"epoch": 4.622601279317697,
"grad_norm": 0.23737390883921275,
"learning_rate": 6.69949168211721e-07,
"loss": 0.1917,
"step": 1083
},
{
"epoch": 4.6268656716417915,
"grad_norm": 0.253351168991941,
"learning_rate": 6.547206545261109e-07,
"loss": 0.1886,
"step": 1084
},
{
"epoch": 4.631130063965885,
"grad_norm": 0.25325937730860365,
"learning_rate": 6.396643347102683e-07,
"loss": 0.1834,
"step": 1085
},
{
"epoch": 4.635394456289979,
"grad_norm": 0.2614575479574234,
"learning_rate": 6.247803427816945e-07,
"loss": 0.1809,
"step": 1086
},
{
"epoch": 4.639658848614072,
"grad_norm": 0.23936509091432043,
"learning_rate": 6.10068811223985e-07,
"loss": 0.1807,
"step": 1087
},
{
"epoch": 4.643923240938166,
"grad_norm": 0.2503677362313397,
"learning_rate": 5.955298709856516e-07,
"loss": 0.1844,
"step": 1088
},
{
"epoch": 4.6481876332622605,
"grad_norm": 0.24342072360801173,
"learning_rate": 5.811636514789598e-07,
"loss": 0.1772,
"step": 1089
},
{
"epoch": 4.652452025586354,
"grad_norm": 0.24539213277480382,
"learning_rate": 5.669702805787714e-07,
"loss": 0.1989,
"step": 1090
},
{
"epoch": 4.656716417910448,
"grad_norm": 0.2652265147460065,
"learning_rate": 5.529498846214054e-07,
"loss": 0.1863,
"step": 1091
},
{
"epoch": 4.660980810234541,
"grad_norm": 0.2539211580897812,
"learning_rate": 5.391025884035239e-07,
"loss": 0.1677,
"step": 1092
},
{
"epoch": 4.665245202558635,
"grad_norm": 0.24704196039425894,
"learning_rate": 5.254285151810124e-07,
"loss": 0.1751,
"step": 1093
},
{
"epoch": 4.669509594882729,
"grad_norm": 0.2560366885967282,
"learning_rate": 5.119277866678829e-07,
"loss": 0.1935,
"step": 1094
},
{
"epoch": 4.673773987206823,
"grad_norm": 0.24482466047481527,
"learning_rate": 4.986005230351954e-07,
"loss": 0.184,
"step": 1095
},
{
"epoch": 4.678038379530917,
"grad_norm": 0.2364984365035076,
"learning_rate": 4.854468429099801e-07,
"loss": 0.1735,
"step": 1096
},
{
"epoch": 4.682302771855011,
"grad_norm": 0.251430758880685,
"learning_rate": 4.7246686337419247e-07,
"loss": 0.1765,
"step": 1097
},
{
"epoch": 4.686567164179104,
"grad_norm": 0.25154556154939406,
"learning_rate": 4.5966069996365993e-07,
"loss": 0.2019,
"step": 1098
},
{
"epoch": 4.690831556503198,
"grad_norm": 0.25294495835219155,
"learning_rate": 4.470284666670632e-07,
"loss": 0.1736,
"step": 1099
},
{
"epoch": 4.6950959488272925,
"grad_norm": 0.2592829877544315,
"learning_rate": 4.345702759249171e-07,
"loss": 0.1636,
"step": 1100
},
{
"epoch": 4.699360341151386,
"grad_norm": 0.26789209418974264,
"learning_rate": 4.22286238628562e-07,
"loss": 0.1911,
"step": 1101
},
{
"epoch": 4.70362473347548,
"grad_norm": 0.24541428505242718,
"learning_rate": 4.101764641191963e-07,
"loss": 0.1851,
"step": 1102
},
{
"epoch": 4.707889125799573,
"grad_norm": 0.2503897444579661,
"learning_rate": 3.982410601868858e-07,
"loss": 0.1791,
"step": 1103
},
{
"epoch": 4.712153518123667,
"grad_norm": 0.2518436880411174,
"learning_rate": 3.8648013306960664e-07,
"loss": 0.1772,
"step": 1104
},
{
"epoch": 4.7164179104477615,
"grad_norm": 0.2541166033719267,
"learning_rate": 3.748937874523062e-07,
"loss": 0.1687,
"step": 1105
},
{
"epoch": 4.720682302771855,
"grad_norm": 0.2474483136739983,
"learning_rate": 3.634821264659727e-07,
"loss": 0.168,
"step": 1106
},
{
"epoch": 4.724946695095949,
"grad_norm": 0.2497137197767489,
"learning_rate": 3.522452516867048e-07,
"loss": 0.1805,
"step": 1107
},
{
"epoch": 4.729211087420042,
"grad_norm": 0.2519870636421656,
"learning_rate": 3.4118326313481887e-07,
"loss": 0.1812,
"step": 1108
},
{
"epoch": 4.733475479744136,
"grad_norm": 0.26608759792221703,
"learning_rate": 3.3029625927395446e-07,
"loss": 0.1812,
"step": 1109
},
{
"epoch": 4.73773987206823,
"grad_norm": 0.2532206981956829,
"learning_rate": 3.1958433701019697e-07,
"loss": 0.1882,
"step": 1110
},
{
"epoch": 4.742004264392325,
"grad_norm": 0.24329576225972888,
"learning_rate": 3.09047591691225e-07,
"loss": 0.1605,
"step": 1111
},
{
"epoch": 4.746268656716418,
"grad_norm": 0.25515549709266605,
"learning_rate": 2.9868611710543785e-07,
"loss": 0.1673,
"step": 1112
},
{
"epoch": 4.750533049040512,
"grad_norm": 0.24876718599525113,
"learning_rate": 2.8850000548115155e-07,
"loss": 0.1841,
"step": 1113
},
{
"epoch": 4.754797441364605,
"grad_norm": 0.2510551497387393,
"learning_rate": 2.7848934748574863e-07,
"loss": 0.2004,
"step": 1114
},
{
"epoch": 4.759061833688699,
"grad_norm": 0.24763934130476553,
"learning_rate": 2.686542322248986e-07,
"loss": 0.1912,
"step": 1115
},
{
"epoch": 4.7633262260127935,
"grad_norm": 0.25906371020191316,
"learning_rate": 2.5899474724174313e-07,
"loss": 0.1726,
"step": 1116
},
{
"epoch": 4.767590618336887,
"grad_norm": 0.26739724085295546,
"learning_rate": 2.495109785161232e-07,
"loss": 0.1842,
"step": 1117
},
{
"epoch": 4.771855010660981,
"grad_norm": 0.2547089358431073,
"learning_rate": 2.402030104638198e-07,
"loss": 0.1869,
"step": 1118
},
{
"epoch": 4.776119402985074,
"grad_norm": 0.24268462946214736,
"learning_rate": 2.3107092593579905e-07,
"loss": 0.1925,
"step": 1119
},
{
"epoch": 4.780383795309168,
"grad_norm": 0.2542924795251583,
"learning_rate": 2.2211480621746828e-07,
"loss": 0.1822,
"step": 1120
},
{
"epoch": 4.7846481876332625,
"grad_norm": 0.2464947967335171,
"learning_rate": 2.133347310279632e-07,
"loss": 0.1845,
"step": 1121
},
{
"epoch": 4.788912579957356,
"grad_norm": 0.24770021042648263,
"learning_rate": 2.0473077851942858e-07,
"loss": 0.1832,
"step": 1122
},
{
"epoch": 4.79317697228145,
"grad_norm": 0.25095916292893766,
"learning_rate": 1.9630302527633427e-07,
"loss": 0.19,
"step": 1123
},
{
"epoch": 4.797441364605544,
"grad_norm": 0.2441082841134644,
"learning_rate": 1.8805154631477808e-07,
"loss": 0.1985,
"step": 1124
},
{
"epoch": 4.801705756929637,
"grad_norm": 0.24377040955638918,
"learning_rate": 1.799764150818306e-07,
"loss": 0.1889,
"step": 1125
},
{
"epoch": 4.8059701492537314,
"grad_norm": 0.23922657574755457,
"learning_rate": 1.7207770345488263e-07,
"loss": 0.2025,
"step": 1126
},
{
"epoch": 4.810234541577826,
"grad_norm": 0.24300973562859218,
"learning_rate": 1.6435548174098536e-07,
"loss": 0.1651,
"step": 1127
},
{
"epoch": 4.814498933901919,
"grad_norm": 0.250775024372348,
"learning_rate": 1.5680981867625566e-07,
"loss": 0.1999,
"step": 1128
},
{
"epoch": 4.818763326226013,
"grad_norm": 0.24745461516925246,
"learning_rate": 1.4944078142523854e-07,
"loss": 0.1882,
"step": 1129
},
{
"epoch": 4.823027718550106,
"grad_norm": 0.2419131887023878,
"learning_rate": 1.4224843558031888e-07,
"loss": 0.1663,
"step": 1130
},
{
"epoch": 4.8272921108742,
"grad_norm": 0.24717510676764287,
"learning_rate": 1.3523284516113955e-07,
"loss": 0.1822,
"step": 1131
},
{
"epoch": 4.8315565031982945,
"grad_norm": 0.24718117512657953,
"learning_rate": 1.2839407261403091e-07,
"loss": 0.1883,
"step": 1132
},
{
"epoch": 4.835820895522388,
"grad_norm": 0.2569349094275073,
"learning_rate": 1.2173217881144894e-07,
"loss": 0.1968,
"step": 1133
},
{
"epoch": 4.840085287846482,
"grad_norm": 0.24347494132677133,
"learning_rate": 1.1524722305144231e-07,
"loss": 0.1906,
"step": 1134
},
{
"epoch": 4.844349680170575,
"grad_norm": 0.24501684057809445,
"learning_rate": 1.0893926305711289e-07,
"loss": 0.1822,
"step": 1135
},
{
"epoch": 4.848614072494669,
"grad_norm": 0.2588930031400982,
"learning_rate": 1.0280835497612052e-07,
"loss": 0.1701,
"step": 1136
},
{
"epoch": 4.8528784648187635,
"grad_norm": 0.23483922876779595,
"learning_rate": 9.685455338016347e-08,
"loss": 0.1945,
"step": 1137
},
{
"epoch": 4.857142857142857,
"grad_norm": 0.24724560647261207,
"learning_rate": 9.107791126450106e-08,
"loss": 0.1802,
"step": 1138
},
{
"epoch": 4.861407249466951,
"grad_norm": 0.26145293127090224,
"learning_rate": 8.547848004748505e-08,
"loss": 0.1687,
"step": 1139
},
{
"epoch": 4.865671641791045,
"grad_norm": 0.23391703784279536,
"learning_rate": 8.005630957010014e-08,
"loss": 0.1779,
"step": 1140
},
{
"epoch": 4.869936034115138,
"grad_norm": 0.2489454120460888,
"learning_rate": 7.481144809551311e-08,
"loss": 0.1659,
"step": 1141
},
{
"epoch": 4.8742004264392325,
"grad_norm": 0.2487457225486811,
"learning_rate": 6.974394230865766e-08,
"loss": 0.1673,
"step": 1142
},
{
"epoch": 4.878464818763327,
"grad_norm": 0.23375974218421045,
"learning_rate": 6.485383731580142e-08,
"loss": 0.1815,
"step": 1143
},
{
"epoch": 4.88272921108742,
"grad_norm": 0.23773610901652706,
"learning_rate": 6.014117664415953e-08,
"loss": 0.1667,
"step": 1144
},
{
"epoch": 4.886993603411514,
"grad_norm": 0.2418877562825462,
"learning_rate": 5.560600224149948e-08,
"loss": 0.1848,
"step": 1145
},
{
"epoch": 4.891257995735607,
"grad_norm": 0.2509736533987665,
"learning_rate": 5.1248354475768034e-08,
"loss": 0.1977,
"step": 1146
},
{
"epoch": 4.895522388059701,
"grad_norm": 0.24187318770204605,
"learning_rate": 4.706827213473819e-08,
"loss": 0.197,
"step": 1147
},
{
"epoch": 4.899786780383796,
"grad_norm": 0.25000706255404975,
"learning_rate": 4.3065792425651675e-08,
"loss": 0.1752,
"step": 1148
},
{
"epoch": 4.904051172707889,
"grad_norm": 0.2480693668890402,
"learning_rate": 3.924095097489922e-08,
"loss": 0.1769,
"step": 1149
},
{
"epoch": 4.908315565031983,
"grad_norm": 0.2539622681438357,
"learning_rate": 3.559378182770079e-08,
"loss": 0.1857,
"step": 1150
},
{
"epoch": 4.912579957356077,
"grad_norm": 0.2385544079469686,
"learning_rate": 3.212431744779476e-08,
"loss": 0.1762,
"step": 1151
},
{
"epoch": 4.91684434968017,
"grad_norm": 0.2576425107348214,
"learning_rate": 2.8832588717164766e-08,
"loss": 0.1852,
"step": 1152
},
{
"epoch": 4.9211087420042645,
"grad_norm": 0.24173764751107737,
"learning_rate": 2.5718624935744395e-08,
"loss": 0.1806,
"step": 1153
},
{
"epoch": 4.925373134328359,
"grad_norm": 0.2558027038778884,
"learning_rate": 2.2782453821177386e-08,
"loss": 0.1905,
"step": 1154
},
{
"epoch": 4.929637526652452,
"grad_norm": 0.2520433231603843,
"learning_rate": 2.0024101508555604e-08,
"loss": 0.1682,
"step": 1155
},
{
"epoch": 4.933901918976546,
"grad_norm": 0.24293952334741303,
"learning_rate": 1.7443592550190346e-08,
"loss": 0.1802,
"step": 1156
},
{
"epoch": 4.938166311300639,
"grad_norm": 0.25128505428584746,
"learning_rate": 1.5040949915399173e-08,
"loss": 0.1896,
"step": 1157
},
{
"epoch": 4.9424307036247335,
"grad_norm": 0.25347230511353935,
"learning_rate": 1.281619499029274e-08,
"loss": 0.1926,
"step": 1158
},
{
"epoch": 4.946695095948828,
"grad_norm": 0.24720562161341594,
"learning_rate": 1.0769347577594958e-08,
"loss": 0.1852,
"step": 1159
},
{
"epoch": 4.950959488272921,
"grad_norm": 0.24835048400025717,
"learning_rate": 8.900425896454234e-09,
"loss": 0.1724,
"step": 1160
},
{
"epoch": 4.955223880597015,
"grad_norm": 0.25080539790226697,
"learning_rate": 7.209446582292501e-09,
"loss": 0.1983,
"step": 1161
},
{
"epoch": 4.959488272921108,
"grad_norm": 0.24987301745644983,
"learning_rate": 5.6964246866497705e-09,
"loss": 0.1728,
"step": 1162
},
{
"epoch": 4.963752665245202,
"grad_norm": 0.24407422413019816,
"learning_rate": 4.361373677055358e-09,
"loss": 0.1821,
"step": 1163
},
{
"epoch": 4.968017057569297,
"grad_norm": 0.24886407641643385,
"learning_rate": 3.2043054369057523e-09,
"loss": 0.1897,
"step": 1164
},
{
"epoch": 4.97228144989339,
"grad_norm": 0.25408212065061436,
"learning_rate": 2.2252302653558155e-09,
"loss": 0.186,
"step": 1165
},
{
"epoch": 4.976545842217484,
"grad_norm": 0.25089979448383903,
"learning_rate": 1.4241568772321856e-09,
"loss": 0.1861,
"step": 1166
},
{
"epoch": 4.980810234541578,
"grad_norm": 0.2377807867704369,
"learning_rate": 8.010924029533406e-10,
"loss": 0.1736,
"step": 1167
},
{
"epoch": 4.985074626865671,
"grad_norm": 0.24764069381058376,
"learning_rate": 3.560423884629849e-10,
"loss": 0.1725,
"step": 1168
},
{
"epoch": 4.9893390191897655,
"grad_norm": 0.24390854347455568,
"learning_rate": 8.901079518786048e-11,
"loss": 0.1762,
"step": 1169
},
{
"epoch": 4.99360341151386,
"grad_norm": 0.25811352424878503,
"learning_rate": 0.0,
"loss": 0.1743,
"step": 1170
},
{
"epoch": 4.99360341151386,
"step": 1170,
"total_flos": 2.47676830262254e+18,
"train_loss": 0.3444926413588035,
"train_runtime": 48626.6745,
"train_samples_per_second": 3.084,
"train_steps_per_second": 0.024
}
],
"logging_steps": 1.0,
"max_steps": 1170,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.47676830262254e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}