{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.9968652037617556,
"eval_steps": 500,
"global_step": 424,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004702194357366771,
"grad_norm": 3.1606569290161133,
"learning_rate": 5.0000000000000004e-08,
"loss": 1.0072,
"step": 1
},
{
"epoch": 0.009404388714733543,
"grad_norm": 3.2058725357055664,
"learning_rate": 1.0000000000000001e-07,
"loss": 1.0134,
"step": 2
},
{
"epoch": 0.014106583072100314,
"grad_norm": 2.636291265487671,
"learning_rate": 1.5000000000000002e-07,
"loss": 0.9635,
"step": 3
},
{
"epoch": 0.018808777429467086,
"grad_norm": 2.708746910095215,
"learning_rate": 2.0000000000000002e-07,
"loss": 1.0068,
"step": 4
},
{
"epoch": 0.023510971786833857,
"grad_norm": 2.8948426246643066,
"learning_rate": 2.5000000000000004e-07,
"loss": 0.9608,
"step": 5
},
{
"epoch": 0.02821316614420063,
"grad_norm": 2.8740086555480957,
"learning_rate": 3.0000000000000004e-07,
"loss": 0.9896,
"step": 6
},
{
"epoch": 0.032915360501567396,
"grad_norm": 2.8338170051574707,
"learning_rate": 3.5000000000000004e-07,
"loss": 0.9098,
"step": 7
},
{
"epoch": 0.03761755485893417,
"grad_norm": 2.7783002853393555,
"learning_rate": 4.0000000000000003e-07,
"loss": 0.9733,
"step": 8
},
{
"epoch": 0.04231974921630094,
"grad_norm": 3.043574333190918,
"learning_rate": 4.5000000000000003e-07,
"loss": 0.9943,
"step": 9
},
{
"epoch": 0.047021943573667714,
"grad_norm": 3.142383337020874,
"learning_rate": 5.000000000000001e-07,
"loss": 0.9475,
"step": 10
},
{
"epoch": 0.05172413793103448,
"grad_norm": 2.9817280769348145,
"learning_rate": 5.5e-07,
"loss": 0.9701,
"step": 11
},
{
"epoch": 0.05642633228840126,
"grad_norm": 2.95699405670166,
"learning_rate": 6.000000000000001e-07,
"loss": 0.9983,
"step": 12
},
{
"epoch": 0.061128526645768025,
"grad_norm": 2.8782453536987305,
"learning_rate": 6.5e-07,
"loss": 0.9502,
"step": 13
},
{
"epoch": 0.06583072100313479,
"grad_norm": 2.6715071201324463,
"learning_rate": 7.000000000000001e-07,
"loss": 0.9436,
"step": 14
},
{
"epoch": 0.07053291536050156,
"grad_norm": 3.869649648666382,
"learning_rate": 7.5e-07,
"loss": 0.9692,
"step": 15
},
{
"epoch": 0.07523510971786834,
"grad_norm": 3.060220956802368,
"learning_rate": 8.000000000000001e-07,
"loss": 0.9258,
"step": 16
},
{
"epoch": 0.07993730407523511,
"grad_norm": 2.8922741413116455,
"learning_rate": 8.500000000000001e-07,
"loss": 0.9719,
"step": 17
},
{
"epoch": 0.08463949843260188,
"grad_norm": 2.7857820987701416,
"learning_rate": 9.000000000000001e-07,
"loss": 0.9072,
"step": 18
},
{
"epoch": 0.08934169278996865,
"grad_norm": 2.9753293991088867,
"learning_rate": 9.500000000000001e-07,
"loss": 0.9032,
"step": 19
},
{
"epoch": 0.09404388714733543,
"grad_norm": 2.7989683151245117,
"learning_rate": 1.0000000000000002e-06,
"loss": 0.8887,
"step": 20
},
{
"epoch": 0.0987460815047022,
"grad_norm": 2.3953049182891846,
"learning_rate": 1.0500000000000001e-06,
"loss": 0.8968,
"step": 21
},
{
"epoch": 0.10344827586206896,
"grad_norm": 2.643731117248535,
"learning_rate": 1.1e-06,
"loss": 0.8501,
"step": 22
},
{
"epoch": 0.10815047021943573,
"grad_norm": 2.3679006099700928,
"learning_rate": 1.1500000000000002e-06,
"loss": 0.8476,
"step": 23
},
{
"epoch": 0.11285266457680251,
"grad_norm": 2.5935540199279785,
"learning_rate": 1.2000000000000002e-06,
"loss": 0.8095,
"step": 24
},
{
"epoch": 0.11755485893416928,
"grad_norm": 2.510300636291504,
"learning_rate": 1.25e-06,
"loss": 0.8099,
"step": 25
},
{
"epoch": 0.12225705329153605,
"grad_norm": 2.372344970703125,
"learning_rate": 1.3e-06,
"loss": 0.7869,
"step": 26
},
{
"epoch": 0.12695924764890282,
"grad_norm": 2.303426504135132,
"learning_rate": 1.3500000000000002e-06,
"loss": 0.7758,
"step": 27
},
{
"epoch": 0.13166144200626959,
"grad_norm": 1.9017939567565918,
"learning_rate": 1.4000000000000001e-06,
"loss": 0.7498,
"step": 28
},
{
"epoch": 0.13636363636363635,
"grad_norm": 1.8810580968856812,
"learning_rate": 1.45e-06,
"loss": 0.7878,
"step": 29
},
{
"epoch": 0.14106583072100312,
"grad_norm": 1.7797424793243408,
"learning_rate": 1.5e-06,
"loss": 0.7747,
"step": 30
},
{
"epoch": 0.14576802507836992,
"grad_norm": 1.5053879022598267,
"learning_rate": 1.5500000000000002e-06,
"loss": 0.7735,
"step": 31
},
{
"epoch": 0.15047021943573669,
"grad_norm": 1.4909234046936035,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.7654,
"step": 32
},
{
"epoch": 0.15517241379310345,
"grad_norm": 1.36083984375,
"learning_rate": 1.6500000000000003e-06,
"loss": 0.6895,
"step": 33
},
{
"epoch": 0.15987460815047022,
"grad_norm": 1.536014199256897,
"learning_rate": 1.7000000000000002e-06,
"loss": 0.675,
"step": 34
},
{
"epoch": 0.164576802507837,
"grad_norm": 1.3426779508590698,
"learning_rate": 1.75e-06,
"loss": 0.7652,
"step": 35
},
{
"epoch": 0.16927899686520376,
"grad_norm": 1.4900612831115723,
"learning_rate": 1.8000000000000001e-06,
"loss": 0.6863,
"step": 36
},
{
"epoch": 0.17398119122257052,
"grad_norm": 1.181241750717163,
"learning_rate": 1.85e-06,
"loss": 0.7136,
"step": 37
},
{
"epoch": 0.1786833855799373,
"grad_norm": 1.461419701576233,
"learning_rate": 1.9000000000000002e-06,
"loss": 0.7606,
"step": 38
},
{
"epoch": 0.1833855799373041,
"grad_norm": 1.04817795753479,
"learning_rate": 1.9500000000000004e-06,
"loss": 0.6829,
"step": 39
},
{
"epoch": 0.18808777429467086,
"grad_norm": 1.0499993562698364,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.7144,
"step": 40
},
{
"epoch": 0.19278996865203762,
"grad_norm": 0.9935064315795898,
"learning_rate": 2.05e-06,
"loss": 0.6736,
"step": 41
},
{
"epoch": 0.1974921630094044,
"grad_norm": 0.9919099807739258,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.7151,
"step": 42
},
{
"epoch": 0.20219435736677116,
"grad_norm": 0.919556200504303,
"learning_rate": 2.15e-06,
"loss": 0.6847,
"step": 43
},
{
"epoch": 0.20689655172413793,
"grad_norm": 1.4762015342712402,
"learning_rate": 2.2e-06,
"loss": 0.6694,
"step": 44
},
{
"epoch": 0.2115987460815047,
"grad_norm": 0.9243163466453552,
"learning_rate": 2.25e-06,
"loss": 0.6489,
"step": 45
},
{
"epoch": 0.21630094043887146,
"grad_norm": 0.7614469528198242,
"learning_rate": 2.3000000000000004e-06,
"loss": 0.6568,
"step": 46
},
{
"epoch": 0.22100313479623823,
"grad_norm": 0.7543922662734985,
"learning_rate": 2.35e-06,
"loss": 0.6359,
"step": 47
},
{
"epoch": 0.22570532915360503,
"grad_norm": 0.7558912038803101,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.6231,
"step": 48
},
{
"epoch": 0.2304075235109718,
"grad_norm": 0.7822129130363464,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.6691,
"step": 49
},
{
"epoch": 0.23510971786833856,
"grad_norm": 0.8646999597549438,
"learning_rate": 2.5e-06,
"loss": 0.682,
"step": 50
},
{
"epoch": 0.23981191222570533,
"grad_norm": 0.8824774622917175,
"learning_rate": 2.55e-06,
"loss": 0.6805,
"step": 51
},
{
"epoch": 0.2445141065830721,
"grad_norm": 0.7697399258613586,
"learning_rate": 2.6e-06,
"loss": 0.6368,
"step": 52
},
{
"epoch": 0.24921630094043887,
"grad_norm": 0.6522512435913086,
"learning_rate": 2.6500000000000005e-06,
"loss": 0.6367,
"step": 53
},
{
"epoch": 0.25391849529780564,
"grad_norm": 0.6172305941581726,
"learning_rate": 2.7000000000000004e-06,
"loss": 0.6291,
"step": 54
},
{
"epoch": 0.25862068965517243,
"grad_norm": 0.7860460877418518,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.6736,
"step": 55
},
{
"epoch": 0.26332288401253917,
"grad_norm": 0.6474862694740295,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.6365,
"step": 56
},
{
"epoch": 0.26802507836990597,
"grad_norm": 0.6867114901542664,
"learning_rate": 2.85e-06,
"loss": 0.6397,
"step": 57
},
{
"epoch": 0.2727272727272727,
"grad_norm": 0.7056852579116821,
"learning_rate": 2.9e-06,
"loss": 0.6138,
"step": 58
},
{
"epoch": 0.2774294670846395,
"grad_norm": 0.6615664958953857,
"learning_rate": 2.95e-06,
"loss": 0.6482,
"step": 59
},
{
"epoch": 0.28213166144200624,
"grad_norm": 0.6649022102355957,
"learning_rate": 3e-06,
"loss": 0.6745,
"step": 60
},
{
"epoch": 0.28683385579937304,
"grad_norm": 0.850848913192749,
"learning_rate": 3.05e-06,
"loss": 0.5956,
"step": 61
},
{
"epoch": 0.29153605015673983,
"grad_norm": 0.5983562469482422,
"learning_rate": 3.1000000000000004e-06,
"loss": 0.5894,
"step": 62
},
{
"epoch": 0.2962382445141066,
"grad_norm": 0.6286782622337341,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.6329,
"step": 63
},
{
"epoch": 0.30094043887147337,
"grad_norm": 0.5919945240020752,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.6402,
"step": 64
},
{
"epoch": 0.3056426332288401,
"grad_norm": 0.5632765889167786,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.5862,
"step": 65
},
{
"epoch": 0.3103448275862069,
"grad_norm": 0.7692590951919556,
"learning_rate": 3.3000000000000006e-06,
"loss": 0.6031,
"step": 66
},
{
"epoch": 0.31504702194357365,
"grad_norm": 0.7313893437385559,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.6312,
"step": 67
},
{
"epoch": 0.31974921630094044,
"grad_norm": 0.6097120642662048,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.5986,
"step": 68
},
{
"epoch": 0.32445141065830724,
"grad_norm": 0.5853808522224426,
"learning_rate": 3.45e-06,
"loss": 0.5847,
"step": 69
},
{
"epoch": 0.329153605015674,
"grad_norm": 0.6093555092811584,
"learning_rate": 3.5e-06,
"loss": 0.6552,
"step": 70
},
{
"epoch": 0.3338557993730408,
"grad_norm": 0.6106334328651428,
"learning_rate": 3.5500000000000003e-06,
"loss": 0.6196,
"step": 71
},
{
"epoch": 0.3385579937304075,
"grad_norm": 0.9254828691482544,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.6005,
"step": 72
},
{
"epoch": 0.3432601880877743,
"grad_norm": 0.5471694469451904,
"learning_rate": 3.65e-06,
"loss": 0.5907,
"step": 73
},
{
"epoch": 0.34796238244514105,
"grad_norm": 0.6204228401184082,
"learning_rate": 3.7e-06,
"loss": 0.6079,
"step": 74
},
{
"epoch": 0.35266457680250785,
"grad_norm": 0.52458256483078,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.6001,
"step": 75
},
{
"epoch": 0.3573667711598746,
"grad_norm": 0.5356763601303101,
"learning_rate": 3.8000000000000005e-06,
"loss": 0.5987,
"step": 76
},
{
"epoch": 0.3620689655172414,
"grad_norm": 0.5408467054367065,
"learning_rate": 3.85e-06,
"loss": 0.6104,
"step": 77
},
{
"epoch": 0.3667711598746082,
"grad_norm": 0.5075871348381042,
"learning_rate": 3.900000000000001e-06,
"loss": 0.5569,
"step": 78
},
{
"epoch": 0.3714733542319749,
"grad_norm": 0.8474109768867493,
"learning_rate": 3.95e-06,
"loss": 0.6195,
"step": 79
},
{
"epoch": 0.3761755485893417,
"grad_norm": 0.4750897288322449,
"learning_rate": 4.000000000000001e-06,
"loss": 0.5399,
"step": 80
},
{
"epoch": 0.38087774294670845,
"grad_norm": 0.5082002878189087,
"learning_rate": 4.05e-06,
"loss": 0.5997,
"step": 81
},
{
"epoch": 0.38557993730407525,
"grad_norm": 0.5343796014785767,
"learning_rate": 4.1e-06,
"loss": 0.5704,
"step": 82
},
{
"epoch": 0.390282131661442,
"grad_norm": 0.520311713218689,
"learning_rate": 4.15e-06,
"loss": 0.5818,
"step": 83
},
{
"epoch": 0.3949843260188088,
"grad_norm": 0.5292978286743164,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.5852,
"step": 84
},
{
"epoch": 0.3996865203761755,
"grad_norm": 0.539886474609375,
"learning_rate": 4.25e-06,
"loss": 0.6057,
"step": 85
},
{
"epoch": 0.4043887147335423,
"grad_norm": 0.6468827128410339,
"learning_rate": 4.3e-06,
"loss": 0.6122,
"step": 86
},
{
"epoch": 0.4090909090909091,
"grad_norm": 0.5537365078926086,
"learning_rate": 4.350000000000001e-06,
"loss": 0.5652,
"step": 87
},
{
"epoch": 0.41379310344827586,
"grad_norm": 0.6226018667221069,
"learning_rate": 4.4e-06,
"loss": 0.5884,
"step": 88
},
{
"epoch": 0.41849529780564265,
"grad_norm": 0.5016945004463196,
"learning_rate": 4.450000000000001e-06,
"loss": 0.5877,
"step": 89
},
{
"epoch": 0.4231974921630094,
"grad_norm": 0.5059167146682739,
"learning_rate": 4.5e-06,
"loss": 0.5676,
"step": 90
},
{
"epoch": 0.4278996865203762,
"grad_norm": 0.47521743178367615,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.5929,
"step": 91
},
{
"epoch": 0.43260188087774293,
"grad_norm": 0.531306266784668,
"learning_rate": 4.600000000000001e-06,
"loss": 0.5983,
"step": 92
},
{
"epoch": 0.4373040752351097,
"grad_norm": 0.4965567886829376,
"learning_rate": 4.65e-06,
"loss": 0.5279,
"step": 93
},
{
"epoch": 0.44200626959247646,
"grad_norm": 0.5125988125801086,
"learning_rate": 4.7e-06,
"loss": 0.5436,
"step": 94
},
{
"epoch": 0.44670846394984326,
"grad_norm": 0.557763934135437,
"learning_rate": 4.75e-06,
"loss": 0.5496,
"step": 95
},
{
"epoch": 0.45141065830721006,
"grad_norm": 0.6993274092674255,
"learning_rate": 4.800000000000001e-06,
"loss": 0.5498,
"step": 96
},
{
"epoch": 0.4561128526645768,
"grad_norm": 0.5485453009605408,
"learning_rate": 4.85e-06,
"loss": 0.5552,
"step": 97
},
{
"epoch": 0.4608150470219436,
"grad_norm": 1.9821522235870361,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.569,
"step": 98
},
{
"epoch": 0.46551724137931033,
"grad_norm": 0.6074144840240479,
"learning_rate": 4.95e-06,
"loss": 0.5546,
"step": 99
},
{
"epoch": 0.4702194357366771,
"grad_norm": 0.5404040813446045,
"learning_rate": 5e-06,
"loss": 0.5775,
"step": 100
},
{
"epoch": 0.47492163009404387,
"grad_norm": 0.500438928604126,
"learning_rate": 4.9999910183883085e-06,
"loss": 0.5569,
"step": 101
},
{
"epoch": 0.47962382445141066,
"grad_norm": 0.5036981701850891,
"learning_rate": 4.999964073617768e-06,
"loss": 0.5663,
"step": 102
},
{
"epoch": 0.4843260188087774,
"grad_norm": 0.4537642300128937,
"learning_rate": 4.999919165881985e-06,
"loss": 0.5527,
"step": 103
},
{
"epoch": 0.4890282131661442,
"grad_norm": 0.49653521180152893,
"learning_rate": 4.999856295503635e-06,
"loss": 0.563,
"step": 104
},
{
"epoch": 0.493730407523511,
"grad_norm": 0.46847566962242126,
"learning_rate": 4.9997754629344596e-06,
"loss": 0.5425,
"step": 105
},
{
"epoch": 0.49843260188087773,
"grad_norm": 0.5192411541938782,
"learning_rate": 4.999676668755263e-06,
"loss": 0.5315,
"step": 106
},
{
"epoch": 0.5031347962382445,
"grad_norm": 0.5170287489891052,
"learning_rate": 4.999559913675912e-06,
"loss": 0.5627,
"step": 107
},
{
"epoch": 0.5078369905956113,
"grad_norm": 0.47297438979148865,
"learning_rate": 4.999425198535325e-06,
"loss": 0.5432,
"step": 108
},
{
"epoch": 0.512539184952978,
"grad_norm": 0.4873776137828827,
"learning_rate": 4.999272524301469e-06,
"loss": 0.5473,
"step": 109
},
{
"epoch": 0.5172413793103449,
"grad_norm": 0.5432935357093811,
"learning_rate": 4.9991018920713505e-06,
"loss": 0.5642,
"step": 110
},
{
"epoch": 0.5219435736677116,
"grad_norm": 0.4850105345249176,
"learning_rate": 4.9989133030710154e-06,
"loss": 0.548,
"step": 111
},
{
"epoch": 0.5266457680250783,
"grad_norm": 0.9399585723876953,
"learning_rate": 4.9987067586555275e-06,
"loss": 0.5471,
"step": 112
},
{
"epoch": 0.5313479623824452,
"grad_norm": 0.5167811512947083,
"learning_rate": 4.998482260308969e-06,
"loss": 0.5648,
"step": 113
},
{
"epoch": 0.5360501567398119,
"grad_norm": 0.5069029927253723,
"learning_rate": 4.998239809644427e-06,
"loss": 0.5568,
"step": 114
},
{
"epoch": 0.5407523510971787,
"grad_norm": 0.8738563656806946,
"learning_rate": 4.9979794084039755e-06,
"loss": 0.5719,
"step": 115
},
{
"epoch": 0.5454545454545454,
"grad_norm": 0.5216553807258606,
"learning_rate": 4.997701058458677e-06,
"loss": 0.5309,
"step": 116
},
{
"epoch": 0.5501567398119123,
"grad_norm": 0.9678344130516052,
"learning_rate": 4.997404761808554e-06,
"loss": 0.5645,
"step": 117
},
{
"epoch": 0.554858934169279,
"grad_norm": 0.496598482131958,
"learning_rate": 4.9970905205825845e-06,
"loss": 0.5711,
"step": 118
},
{
"epoch": 0.5595611285266457,
"grad_norm": 0.4745199680328369,
"learning_rate": 4.996758337038683e-06,
"loss": 0.5613,
"step": 119
},
{
"epoch": 0.5642633228840125,
"grad_norm": 0.5595977902412415,
"learning_rate": 4.996408213563684e-06,
"loss": 0.5559,
"step": 120
},
{
"epoch": 0.5689655172413793,
"grad_norm": 0.4743712544441223,
"learning_rate": 4.996040152673326e-06,
"loss": 0.5228,
"step": 121
},
{
"epoch": 0.5736677115987461,
"grad_norm": 0.5418100953102112,
"learning_rate": 4.995654157012233e-06,
"loss": 0.536,
"step": 122
},
{
"epoch": 0.5783699059561128,
"grad_norm": 0.521977424621582,
"learning_rate": 4.995250229353895e-06,
"loss": 0.5305,
"step": 123
},
{
"epoch": 0.5830721003134797,
"grad_norm": 0.5062761902809143,
"learning_rate": 4.99482837260065e-06,
"loss": 0.5417,
"step": 124
},
{
"epoch": 0.5877742946708464,
"grad_norm": 0.5895913243293762,
"learning_rate": 4.99438858978366e-06,
"loss": 0.573,
"step": 125
},
{
"epoch": 0.5924764890282131,
"grad_norm": 0.5442466139793396,
"learning_rate": 4.993930884062892e-06,
"loss": 0.5563,
"step": 126
},
{
"epoch": 0.5971786833855799,
"grad_norm": 0.5130571722984314,
"learning_rate": 4.993455258727094e-06,
"loss": 0.5549,
"step": 127
},
{
"epoch": 0.6018808777429467,
"grad_norm": 0.5579081773757935,
"learning_rate": 4.992961717193773e-06,
"loss": 0.5554,
"step": 128
},
{
"epoch": 0.6065830721003135,
"grad_norm": 0.6375890374183655,
"learning_rate": 4.9924502630091655e-06,
"loss": 0.5626,
"step": 129
},
{
"epoch": 0.6112852664576802,
"grad_norm": 0.5129190683364868,
"learning_rate": 4.99192089984822e-06,
"loss": 0.5493,
"step": 130
},
{
"epoch": 0.6159874608150471,
"grad_norm": 0.5293419361114502,
"learning_rate": 4.9913736315145614e-06,
"loss": 0.5565,
"step": 131
},
{
"epoch": 0.6206896551724138,
"grad_norm": 0.6502572298049927,
"learning_rate": 4.990808461940474e-06,
"loss": 0.5358,
"step": 132
},
{
"epoch": 0.6253918495297806,
"grad_norm": 0.5450296998023987,
"learning_rate": 4.990225395186862e-06,
"loss": 0.5421,
"step": 133
},
{
"epoch": 0.6300940438871473,
"grad_norm": 0.45506399869918823,
"learning_rate": 4.9896244354432314e-06,
"loss": 0.5396,
"step": 134
},
{
"epoch": 0.6347962382445141,
"grad_norm": 0.5095545649528503,
"learning_rate": 4.98900558702765e-06,
"loss": 0.5486,
"step": 135
},
{
"epoch": 0.6394984326018809,
"grad_norm": 0.4836446940898895,
"learning_rate": 4.9883688543867225e-06,
"loss": 0.5596,
"step": 136
},
{
"epoch": 0.6442006269592476,
"grad_norm": 0.5253512859344482,
"learning_rate": 4.987714242095558e-06,
"loss": 0.5308,
"step": 137
},
{
"epoch": 0.6489028213166145,
"grad_norm": 0.8280164003372192,
"learning_rate": 4.9870417548577355e-06,
"loss": 0.5349,
"step": 138
},
{
"epoch": 0.6536050156739812,
"grad_norm": 0.4729730188846588,
"learning_rate": 4.9863513975052696e-06,
"loss": 0.5416,
"step": 139
},
{
"epoch": 0.658307210031348,
"grad_norm": 0.5932718515396118,
"learning_rate": 4.985643174998578e-06,
"loss": 0.5638,
"step": 140
},
{
"epoch": 0.6630094043887147,
"grad_norm": 0.5187026262283325,
"learning_rate": 4.984917092426445e-06,
"loss": 0.5507,
"step": 141
},
{
"epoch": 0.6677115987460815,
"grad_norm": 0.5024245977401733,
"learning_rate": 4.984173155005982e-06,
"loss": 0.5406,
"step": 142
},
{
"epoch": 0.6724137931034483,
"grad_norm": 0.4735509157180786,
"learning_rate": 4.983411368082597e-06,
"loss": 0.5431,
"step": 143
},
{
"epoch": 0.677115987460815,
"grad_norm": 0.5040024518966675,
"learning_rate": 4.982631737129948e-06,
"loss": 0.5291,
"step": 144
},
{
"epoch": 0.6818181818181818,
"grad_norm": 0.47764894366264343,
"learning_rate": 4.98183426774991e-06,
"loss": 0.5677,
"step": 145
},
{
"epoch": 0.6865203761755486,
"grad_norm": 0.5211489796638489,
"learning_rate": 4.981018965672529e-06,
"loss": 0.566,
"step": 146
},
{
"epoch": 0.6912225705329154,
"grad_norm": 1.022007942199707,
"learning_rate": 4.98018583675599e-06,
"loss": 0.5476,
"step": 147
},
{
"epoch": 0.6959247648902821,
"grad_norm": 0.5263912677764893,
"learning_rate": 4.979334886986562e-06,
"loss": 0.5473,
"step": 148
},
{
"epoch": 0.700626959247649,
"grad_norm": 0.5014091730117798,
"learning_rate": 4.978466122478567e-06,
"loss": 0.5642,
"step": 149
},
{
"epoch": 0.7053291536050157,
"grad_norm": 0.5003350973129272,
"learning_rate": 4.97757954947433e-06,
"loss": 0.5311,
"step": 150
},
{
"epoch": 0.7100313479623824,
"grad_norm": 0.5010690093040466,
"learning_rate": 4.976675174344132e-06,
"loss": 0.5469,
"step": 151
},
{
"epoch": 0.7147335423197492,
"grad_norm": 0.45779237151145935,
"learning_rate": 4.975753003586172e-06,
"loss": 0.5273,
"step": 152
},
{
"epoch": 0.719435736677116,
"grad_norm": 0.6231539845466614,
"learning_rate": 4.974813043826513e-06,
"loss": 0.5182,
"step": 153
},
{
"epoch": 0.7241379310344828,
"grad_norm": 0.5361394286155701,
"learning_rate": 4.973855301819039e-06,
"loss": 0.5372,
"step": 154
},
{
"epoch": 0.7288401253918495,
"grad_norm": 0.5193538665771484,
"learning_rate": 4.972879784445402e-06,
"loss": 0.5201,
"step": 155
},
{
"epoch": 0.7335423197492164,
"grad_norm": 0.47956809401512146,
"learning_rate": 4.971886498714978e-06,
"loss": 0.5402,
"step": 156
},
{
"epoch": 0.7382445141065831,
"grad_norm": 0.5303016901016235,
"learning_rate": 4.97087545176481e-06,
"loss": 0.5174,
"step": 157
},
{
"epoch": 0.7429467084639498,
"grad_norm": 0.5002286434173584,
"learning_rate": 4.9698466508595655e-06,
"loss": 0.5453,
"step": 158
},
{
"epoch": 0.7476489028213166,
"grad_norm": 0.6070297360420227,
"learning_rate": 4.9688001033914756e-06,
"loss": 0.5327,
"step": 159
},
{
"epoch": 0.7523510971786834,
"grad_norm": 0.5436793565750122,
"learning_rate": 4.967735816880286e-06,
"loss": 0.544,
"step": 160
},
{
"epoch": 0.7570532915360502,
"grad_norm": 0.538012683391571,
"learning_rate": 4.966653798973205e-06,
"loss": 0.5233,
"step": 161
},
{
"epoch": 0.7617554858934169,
"grad_norm": 0.4916169345378876,
"learning_rate": 4.965554057444842e-06,
"loss": 0.5168,
"step": 162
},
{
"epoch": 0.7664576802507836,
"grad_norm": 0.48281437158584595,
"learning_rate": 4.964436600197161e-06,
"loss": 0.5393,
"step": 163
},
{
"epoch": 0.7711598746081505,
"grad_norm": 0.5184990167617798,
"learning_rate": 4.963301435259413e-06,
"loss": 0.5085,
"step": 164
},
{
"epoch": 0.7758620689655172,
"grad_norm": 0.4706438183784485,
"learning_rate": 4.962148570788088e-06,
"loss": 0.5299,
"step": 165
},
{
"epoch": 0.780564263322884,
"grad_norm": 0.6550764441490173,
"learning_rate": 4.96097801506685e-06,
"loss": 0.5192,
"step": 166
},
{
"epoch": 0.7852664576802508,
"grad_norm": 0.5386581420898438,
"learning_rate": 4.959789776506482e-06,
"loss": 0.5258,
"step": 167
},
{
"epoch": 0.7899686520376176,
"grad_norm": 0.5060779452323914,
"learning_rate": 4.958583863644821e-06,
"loss": 0.5512,
"step": 168
},
{
"epoch": 0.7946708463949843,
"grad_norm": 0.47050032019615173,
"learning_rate": 4.9573602851466985e-06,
"loss": 0.5176,
"step": 169
},
{
"epoch": 0.799373040752351,
"grad_norm": 7.3139567375183105,
"learning_rate": 4.9561190498038815e-06,
"loss": 0.5381,
"step": 170
},
{
"epoch": 0.8040752351097179,
"grad_norm": 0.620528519153595,
"learning_rate": 4.954860166535005e-06,
"loss": 0.5299,
"step": 171
},
{
"epoch": 0.8087774294670846,
"grad_norm": 0.45067766308784485,
"learning_rate": 4.95358364438551e-06,
"loss": 0.5328,
"step": 172
},
{
"epoch": 0.8134796238244514,
"grad_norm": 0.6771508455276489,
"learning_rate": 4.952289492527576e-06,
"loss": 0.5601,
"step": 173
},
{
"epoch": 0.8181818181818182,
"grad_norm": 0.518925130367279,
"learning_rate": 4.9509777202600605e-06,
"loss": 0.494,
"step": 174
},
{
"epoch": 0.822884012539185,
"grad_norm": 0.5191988945007324,
"learning_rate": 4.949648337008425e-06,
"loss": 0.5425,
"step": 175
},
{
"epoch": 0.8275862068965517,
"grad_norm": 0.8600963354110718,
"learning_rate": 4.948301352324674e-06,
"loss": 0.5332,
"step": 176
},
{
"epoch": 0.8322884012539185,
"grad_norm": 0.5405915379524231,
"learning_rate": 4.946936775887281e-06,
"loss": 0.5276,
"step": 177
},
{
"epoch": 0.8369905956112853,
"grad_norm": 0.48730772733688354,
"learning_rate": 4.945554617501124e-06,
"loss": 0.5217,
"step": 178
},
{
"epoch": 0.841692789968652,
"grad_norm": 0.5092865824699402,
"learning_rate": 4.944154887097411e-06,
"loss": 0.5534,
"step": 179
},
{
"epoch": 0.8463949843260188,
"grad_norm": 0.4994933605194092,
"learning_rate": 4.942737594733608e-06,
"loss": 0.5242,
"step": 180
},
{
"epoch": 0.8510971786833855,
"grad_norm": 0.4554043412208557,
"learning_rate": 4.941302750593373e-06,
"loss": 0.5424,
"step": 181
},
{
"epoch": 0.8557993730407524,
"grad_norm": 0.4865265488624573,
"learning_rate": 4.939850364986475e-06,
"loss": 0.482,
"step": 182
},
{
"epoch": 0.8605015673981191,
"grad_norm": 0.5013875365257263,
"learning_rate": 4.938380448348725e-06,
"loss": 0.4908,
"step": 183
},
{
"epoch": 0.8652037617554859,
"grad_norm": 0.4997917115688324,
"learning_rate": 4.9368930112419e-06,
"loss": 0.5336,
"step": 184
},
{
"epoch": 0.8699059561128527,
"grad_norm": 0.4783482551574707,
"learning_rate": 4.935388064353665e-06,
"loss": 0.5338,
"step": 185
},
{
"epoch": 0.8746081504702194,
"grad_norm": 0.7221089005470276,
"learning_rate": 4.9338656184975e-06,
"loss": 0.5327,
"step": 186
},
{
"epoch": 0.8793103448275862,
"grad_norm": 0.48115843534469604,
"learning_rate": 4.932325684612618e-06,
"loss": 0.5408,
"step": 187
},
{
"epoch": 0.8840125391849529,
"grad_norm": 0.4940219223499298,
"learning_rate": 4.93076827376389e-06,
"loss": 0.5455,
"step": 188
},
{
"epoch": 0.8887147335423198,
"grad_norm": 0.4754747450351715,
"learning_rate": 4.9291933971417635e-06,
"loss": 0.542,
"step": 189
},
{
"epoch": 0.8934169278996865,
"grad_norm": 0.548713207244873,
"learning_rate": 4.9276010660621835e-06,
"loss": 0.5292,
"step": 190
},
{
"epoch": 0.8981191222570533,
"grad_norm": 0.7292612195014954,
"learning_rate": 4.925991291966508e-06,
"loss": 0.5073,
"step": 191
},
{
"epoch": 0.9028213166144201,
"grad_norm": 0.5254770517349243,
"learning_rate": 4.92436408642143e-06,
"loss": 0.5451,
"step": 192
},
{
"epoch": 0.9075235109717869,
"grad_norm": 0.47938767075538635,
"learning_rate": 4.9227194611188934e-06,
"loss": 0.5204,
"step": 193
},
{
"epoch": 0.9122257053291536,
"grad_norm": 0.6740232706069946,
"learning_rate": 4.921057427876007e-06,
"loss": 0.4928,
"step": 194
},
{
"epoch": 0.9169278996865203,
"grad_norm": 0.5455343723297119,
"learning_rate": 4.919377998634959e-06,
"loss": 0.5468,
"step": 195
},
{
"epoch": 0.9216300940438872,
"grad_norm": 0.5001958012580872,
"learning_rate": 4.917681185462934e-06,
"loss": 0.5339,
"step": 196
},
{
"epoch": 0.9263322884012539,
"grad_norm": 0.5084257125854492,
"learning_rate": 4.915967000552028e-06,
"loss": 0.5259,
"step": 197
},
{
"epoch": 0.9310344827586207,
"grad_norm": 0.4807164967060089,
"learning_rate": 4.914235456219154e-06,
"loss": 0.5204,
"step": 198
},
{
"epoch": 0.9357366771159875,
"grad_norm": 0.6099370718002319,
"learning_rate": 4.912486564905959e-06,
"loss": 0.544,
"step": 199
},
{
"epoch": 0.9404388714733543,
"grad_norm": 0.47461947798728943,
"learning_rate": 4.910720339178735e-06,
"loss": 0.5295,
"step": 200
},
{
"epoch": 0.945141065830721,
"grad_norm": 0.500136137008667,
"learning_rate": 4.908936791728323e-06,
"loss": 0.5321,
"step": 201
},
{
"epoch": 0.9498432601880877,
"grad_norm": 0.5235631465911865,
"learning_rate": 4.907135935370027e-06,
"loss": 0.5338,
"step": 202
},
{
"epoch": 0.9545454545454546,
"grad_norm": 0.9285804629325867,
"learning_rate": 4.905317783043523e-06,
"loss": 0.5393,
"step": 203
},
{
"epoch": 0.9592476489028213,
"grad_norm": 0.4834178388118744,
"learning_rate": 4.9034823478127605e-06,
"loss": 0.5211,
"step": 204
},
{
"epoch": 0.9639498432601881,
"grad_norm": 0.4830580949783325,
"learning_rate": 4.901629642865872e-06,
"loss": 0.4986,
"step": 205
},
{
"epoch": 0.9686520376175548,
"grad_norm": 0.49718615412712097,
"learning_rate": 4.89975968151508e-06,
"loss": 0.5204,
"step": 206
},
{
"epoch": 0.9733542319749217,
"grad_norm": 0.5056726336479187,
"learning_rate": 4.8978724771965965e-06,
"loss": 0.5133,
"step": 207
},
{
"epoch": 0.9780564263322884,
"grad_norm": 0.7357563376426697,
"learning_rate": 4.895968043470532e-06,
"loss": 0.5307,
"step": 208
},
{
"epoch": 0.9827586206896551,
"grad_norm": 0.515610933303833,
"learning_rate": 4.894046394020794e-06,
"loss": 0.4955,
"step": 209
},
{
"epoch": 0.987460815047022,
"grad_norm": 0.5124618411064148,
"learning_rate": 4.892107542654988e-06,
"loss": 0.526,
"step": 210
},
{
"epoch": 0.9921630094043887,
"grad_norm": 0.5059565901756287,
"learning_rate": 4.890151503304325e-06,
"loss": 0.5473,
"step": 211
},
{
"epoch": 0.9968652037617555,
"grad_norm": 0.4806717336177826,
"learning_rate": 4.88817829002351e-06,
"loss": 0.5212,
"step": 212
},
{
"epoch": 1.0047021943573669,
"grad_norm": 0.9454345703125,
"learning_rate": 4.886187916990653e-06,
"loss": 1.0566,
"step": 213
},
{
"epoch": 1.0094043887147335,
"grad_norm": 0.4871070086956024,
"learning_rate": 4.884180398507163e-06,
"loss": 0.503,
"step": 214
},
{
"epoch": 1.0141065830721003,
"grad_norm": 0.45102012157440186,
"learning_rate": 4.882155748997636e-06,
"loss": 0.4954,
"step": 215
},
{
"epoch": 1.0188087774294672,
"grad_norm": 0.49910685420036316,
"learning_rate": 4.8801139830097685e-06,
"loss": 0.5019,
"step": 216
},
{
"epoch": 1.0235109717868338,
"grad_norm": 0.5155763030052185,
"learning_rate": 4.878055115214238e-06,
"loss": 0.5102,
"step": 217
},
{
"epoch": 1.0282131661442007,
"grad_norm": 0.4567059874534607,
"learning_rate": 4.875979160404607e-06,
"loss": 0.5069,
"step": 218
},
{
"epoch": 1.0329153605015673,
"grad_norm": 0.4782896935939789,
"learning_rate": 4.873886133497209e-06,
"loss": 0.5182,
"step": 219
},
{
"epoch": 1.0376175548589341,
"grad_norm": 0.44995731115341187,
"learning_rate": 4.87177604953105e-06,
"loss": 0.513,
"step": 220
},
{
"epoch": 1.042319749216301,
"grad_norm": 0.470059871673584,
"learning_rate": 4.869648923667694e-06,
"loss": 0.468,
"step": 221
},
{
"epoch": 1.0470219435736676,
"grad_norm": 0.5356128215789795,
"learning_rate": 4.867504771191154e-06,
"loss": 0.4942,
"step": 222
},
{
"epoch": 1.0517241379310345,
"grad_norm": 0.5137870907783508,
"learning_rate": 4.865343607507788e-06,
"loss": 0.5022,
"step": 223
},
{
"epoch": 1.0564263322884013,
"grad_norm": 0.47419992089271545,
"learning_rate": 4.86316544814618e-06,
"loss": 0.5158,
"step": 224
},
{
"epoch": 1.061128526645768,
"grad_norm": 0.49087393283843994,
"learning_rate": 4.860970308757038e-06,
"loss": 0.4605,
"step": 225
},
{
"epoch": 1.0658307210031348,
"grad_norm": 0.4988348186016083,
"learning_rate": 4.858758205113072e-06,
"loss": 0.4912,
"step": 226
},
{
"epoch": 1.0705329153605017,
"grad_norm": 0.44543248414993286,
"learning_rate": 4.856529153108888e-06,
"loss": 0.524,
"step": 227
},
{
"epoch": 1.0752351097178683,
"grad_norm": 0.5953351259231567,
"learning_rate": 4.854283168760868e-06,
"loss": 0.5001,
"step": 228
},
{
"epoch": 1.0799373040752351,
"grad_norm": 0.5012004375457764,
"learning_rate": 4.85202026820706e-06,
"loss": 0.4968,
"step": 229
},
{
"epoch": 1.084639498432602,
"grad_norm": 0.5023937821388245,
"learning_rate": 4.84974046770706e-06,
"loss": 0.5345,
"step": 230
},
{
"epoch": 1.0893416927899686,
"grad_norm": 0.4705684185028076,
"learning_rate": 4.847443783641893e-06,
"loss": 0.4459,
"step": 231
},
{
"epoch": 1.0940438871473355,
"grad_norm": 0.5082476735115051,
"learning_rate": 4.845130232513901e-06,
"loss": 0.4905,
"step": 232
},
{
"epoch": 1.098746081504702,
"grad_norm": 0.5283995866775513,
"learning_rate": 4.842799830946615e-06,
"loss": 0.4878,
"step": 233
},
{
"epoch": 1.103448275862069,
"grad_norm": 0.6373623013496399,
"learning_rate": 4.840452595684646e-06,
"loss": 0.4867,
"step": 234
},
{
"epoch": 1.1081504702194358,
"grad_norm": 0.4624481201171875,
"learning_rate": 4.83808854359356e-06,
"loss": 0.4793,
"step": 235
},
{
"epoch": 1.1128526645768024,
"grad_norm": 0.4659098982810974,
"learning_rate": 4.835707691659753e-06,
"loss": 0.4827,
"step": 236
},
{
"epoch": 1.1175548589341693,
"grad_norm": 0.4920850396156311,
"learning_rate": 4.8333100569903365e-06,
"loss": 0.4932,
"step": 237
},
{
"epoch": 1.1222570532915361,
"grad_norm": 0.492286741733551,
"learning_rate": 4.8308956568130094e-06,
"loss": 0.5144,
"step": 238
},
{
"epoch": 1.1269592476489028,
"grad_norm": 0.5429807901382446,
"learning_rate": 4.828464508475934e-06,
"loss": 0.5054,
"step": 239
},
{
"epoch": 1.1316614420062696,
"grad_norm": 2.4671998023986816,
"learning_rate": 4.826016629447616e-06,
"loss": 0.5073,
"step": 240
},
{
"epoch": 1.1363636363636362,
"grad_norm": 0.4593118131160736,
"learning_rate": 4.823552037316775e-06,
"loss": 0.4856,
"step": 241
},
{
"epoch": 1.141065830721003,
"grad_norm": 0.6855646371841431,
"learning_rate": 4.821070749792218e-06,
"loss": 0.5388,
"step": 242
},
{
"epoch": 1.14576802507837,
"grad_norm": 0.5722374320030212,
"learning_rate": 4.818572784702713e-06,
"loss": 0.51,
"step": 243
},
{
"epoch": 1.1504702194357366,
"grad_norm": 0.4901357591152191,
"learning_rate": 4.816058159996863e-06,
"loss": 0.5201,
"step": 244
},
{
"epoch": 1.1551724137931034,
"grad_norm": 0.4655209481716156,
"learning_rate": 4.813526893742972e-06,
"loss": 0.501,
"step": 245
},
{
"epoch": 1.1598746081504703,
"grad_norm": 0.7608394622802734,
"learning_rate": 4.810979004128924e-06,
"loss": 0.4961,
"step": 246
},
{
"epoch": 1.164576802507837,
"grad_norm": 0.4857081472873688,
"learning_rate": 4.808414509462042e-06,
"loss": 0.5174,
"step": 247
},
{
"epoch": 1.1692789968652038,
"grad_norm": 0.46672946214675903,
"learning_rate": 4.80583342816896e-06,
"loss": 0.484,
"step": 248
},
{
"epoch": 1.1739811912225706,
"grad_norm": 0.46982088685035706,
"learning_rate": 4.803235778795496e-06,
"loss": 0.5236,
"step": 249
},
{
"epoch": 1.1786833855799372,
"grad_norm": 0.5086098909378052,
"learning_rate": 4.800621580006511e-06,
"loss": 0.4673,
"step": 250
},
{
"epoch": 1.183385579937304,
"grad_norm": 0.45968860387802124,
"learning_rate": 4.797990850585782e-06,
"loss": 0.5151,
"step": 251
},
{
"epoch": 1.188087774294671,
"grad_norm": 0.49544984102249146,
"learning_rate": 4.79534360943586e-06,
"loss": 0.494,
"step": 252
},
{
"epoch": 1.1927899686520376,
"grad_norm": 0.531892716884613,
"learning_rate": 4.792679875577937e-06,
"loss": 0.4778,
"step": 253
},
{
"epoch": 1.1974921630094044,
"grad_norm": 0.5013542175292969,
"learning_rate": 4.789999668151714e-06,
"loss": 0.5132,
"step": 254
},
{
"epoch": 1.2021943573667713,
"grad_norm": 0.46963250637054443,
"learning_rate": 4.7873030064152545e-06,
"loss": 0.4938,
"step": 255
},
{
"epoch": 1.206896551724138,
"grad_norm": 0.465285986661911,
"learning_rate": 4.784589909744856e-06,
"loss": 0.4898,
"step": 256
},
{
"epoch": 1.2115987460815048,
"grad_norm": 0.5183936357498169,
"learning_rate": 4.7818603976349005e-06,
"loss": 0.5004,
"step": 257
},
{
"epoch": 1.2163009404388714,
"grad_norm": 0.47324836254119873,
"learning_rate": 4.779114489697724e-06,
"loss": 0.4972,
"step": 258
},
{
"epoch": 1.2210031347962382,
"grad_norm": 0.5208264589309692,
"learning_rate": 4.776352205663469e-06,
"loss": 0.5023,
"step": 259
},
{
"epoch": 1.225705329153605,
"grad_norm": 0.5583804845809937,
"learning_rate": 4.773573565379947e-06,
"loss": 0.5099,
"step": 260
},
{
"epoch": 1.2304075235109717,
"grad_norm": 0.5016160011291504,
"learning_rate": 4.770778588812489e-06,
"loss": 0.4765,
"step": 261
},
{
"epoch": 1.2351097178683386,
"grad_norm": 0.50210040807724,
"learning_rate": 4.7679672960438135e-06,
"loss": 0.5029,
"step": 262
},
{
"epoch": 1.2398119122257054,
"grad_norm": 0.6636150479316711,
"learning_rate": 4.765139707273872e-06,
"loss": 0.4909,
"step": 263
},
{
"epoch": 1.244514106583072,
"grad_norm": 0.4798625111579895,
"learning_rate": 4.762295842819707e-06,
"loss": 0.5012,
"step": 264
},
{
"epoch": 1.249216300940439,
"grad_norm": 0.5282374024391174,
"learning_rate": 4.759435723115308e-06,
"loss": 0.4681,
"step": 265
},
{
"epoch": 1.2539184952978055,
"grad_norm": 0.5356930494308472,
"learning_rate": 4.756559368711463e-06,
"loss": 0.506,
"step": 266
},
{
"epoch": 1.2586206896551724,
"grad_norm": 0.4857093095779419,
"learning_rate": 4.75366680027561e-06,
"loss": 0.4889,
"step": 267
},
{
"epoch": 1.2633228840125392,
"grad_norm": 0.484018474817276,
"learning_rate": 4.7507580385916906e-06,
"loss": 0.4899,
"step": 268
},
{
"epoch": 1.2680250783699059,
"grad_norm": 0.49720871448516846,
"learning_rate": 4.747833104559999e-06,
"loss": 0.4654,
"step": 269
},
{
"epoch": 1.2727272727272727,
"grad_norm": 0.4631911516189575,
"learning_rate": 4.744892019197033e-06,
"loss": 0.4796,
"step": 270
},
{
"epoch": 1.2774294670846396,
"grad_norm": 0.5116872787475586,
"learning_rate": 4.74193480363534e-06,
"loss": 0.4883,
"step": 271
},
{
"epoch": 1.2821316614420062,
"grad_norm": 0.5275093913078308,
"learning_rate": 4.738961479123373e-06,
"loss": 0.496,
"step": 272
},
{
"epoch": 1.286833855799373,
"grad_norm": 0.5001885890960693,
"learning_rate": 4.735972067025326e-06,
"loss": 0.5012,
"step": 273
},
{
"epoch": 1.29153605015674,
"grad_norm": 0.5875861048698425,
"learning_rate": 4.732966588820991e-06,
"loss": 0.4951,
"step": 274
},
{
"epoch": 1.2962382445141065,
"grad_norm": 0.4893011748790741,
"learning_rate": 4.729945066105599e-06,
"loss": 0.4742,
"step": 275
},
{
"epoch": 1.3009404388714734,
"grad_norm": 0.4648543894290924,
"learning_rate": 4.726907520589664e-06,
"loss": 0.466,
"step": 276
},
{
"epoch": 1.3056426332288402,
"grad_norm": 0.5300162434577942,
"learning_rate": 4.72385397409883e-06,
"loss": 0.5072,
"step": 277
},
{
"epoch": 1.3103448275862069,
"grad_norm": 0.4667080044746399,
"learning_rate": 4.720784448573712e-06,
"loss": 0.4986,
"step": 278
},
{
"epoch": 1.3150470219435737,
"grad_norm": 0.5278895497322083,
"learning_rate": 4.717698966069739e-06,
"loss": 0.5269,
"step": 279
},
{
"epoch": 1.3197492163009406,
"grad_norm": 0.5325866937637329,
"learning_rate": 4.7145975487569965e-06,
"loss": 0.5074,
"step": 280
},
{
"epoch": 1.3244514106583072,
"grad_norm": 0.500861644744873,
"learning_rate": 4.711480218920064e-06,
"loss": 0.4695,
"step": 281
},
{
"epoch": 1.329153605015674,
"grad_norm": 0.5263222455978394,
"learning_rate": 4.708346998957859e-06,
"loss": 0.5173,
"step": 282
},
{
"epoch": 1.3338557993730409,
"grad_norm": 0.622900128364563,
"learning_rate": 4.705197911383473e-06,
"loss": 0.4905,
"step": 283
},
{
"epoch": 1.3385579937304075,
"grad_norm": 0.49273768067359924,
"learning_rate": 4.7020329788240115e-06,
"loss": 0.4743,
"step": 284
},
{
"epoch": 1.3432601880877744,
"grad_norm": 0.49558964371681213,
"learning_rate": 4.6988522240204325e-06,
"loss": 0.4824,
"step": 285
},
{
"epoch": 1.347962382445141,
"grad_norm": 0.4743976891040802,
"learning_rate": 4.695655669827377e-06,
"loss": 0.4977,
"step": 286
},
{
"epoch": 1.3526645768025078,
"grad_norm": 0.49542659521102905,
"learning_rate": 4.6924433392130135e-06,
"loss": 0.4924,
"step": 287
},
{
"epoch": 1.3573667711598745,
"grad_norm": 0.7385990619659424,
"learning_rate": 4.689215255258866e-06,
"loss": 0.5091,
"step": 288
},
{
"epoch": 1.3620689655172413,
"grad_norm": 0.4826123118400574,
"learning_rate": 4.685971441159653e-06,
"loss": 0.4791,
"step": 289
},
{
"epoch": 1.3667711598746082,
"grad_norm": 0.5389033555984497,
"learning_rate": 4.682711920223115e-06,
"loss": 0.4751,
"step": 290
},
{
"epoch": 1.3714733542319748,
"grad_norm": 0.5059546232223511,
"learning_rate": 4.679436715869856e-06,
"loss": 0.499,
"step": 291
},
{
"epoch": 1.3761755485893417,
"grad_norm": 0.5682849884033203,
"learning_rate": 4.676145851633166e-06,
"loss": 0.5143,
"step": 292
},
{
"epoch": 1.3808777429467085,
"grad_norm": 0.4754337668418884,
"learning_rate": 4.672839351158856e-06,
"loss": 0.4997,
"step": 293
},
{
"epoch": 1.3855799373040751,
"grad_norm": 0.5227643847465515,
"learning_rate": 4.669517238205089e-06,
"loss": 0.4834,
"step": 294
},
{
"epoch": 1.390282131661442,
"grad_norm": 0.4954044222831726,
"learning_rate": 4.666179536642208e-06,
"loss": 0.483,
"step": 295
},
{
"epoch": 1.3949843260188088,
"grad_norm": 0.4909021556377411,
"learning_rate": 4.662826270452565e-06,
"loss": 0.4808,
"step": 296
},
{
"epoch": 1.3996865203761755,
"grad_norm": 0.4666971266269684,
"learning_rate": 4.659457463730347e-06,
"loss": 0.488,
"step": 297
},
{
"epoch": 1.4043887147335423,
"grad_norm": 0.5064187049865723,
"learning_rate": 4.6560731406814056e-06,
"loss": 0.5046,
"step": 298
},
{
"epoch": 1.4090909090909092,
"grad_norm": 0.4958318769931793,
"learning_rate": 4.65267332562308e-06,
"loss": 0.5102,
"step": 299
},
{
"epoch": 1.4137931034482758,
"grad_norm": 0.5080632567405701,
"learning_rate": 4.649258042984026e-06,
"loss": 0.5055,
"step": 300
},
{
"epoch": 1.4184952978056427,
"grad_norm": 0.46236541867256165,
"learning_rate": 4.6458273173040395e-06,
"loss": 0.4606,
"step": 301
},
{
"epoch": 1.4231974921630095,
"grad_norm": 1.8524898290634155,
"learning_rate": 4.642381173233874e-06,
"loss": 0.5002,
"step": 302
},
{
"epoch": 1.4278996865203761,
"grad_norm": 0.5202615261077881,
"learning_rate": 4.638919635535073e-06,
"loss": 0.4562,
"step": 303
},
{
"epoch": 1.432601880877743,
"grad_norm": 0.5293647050857544,
"learning_rate": 4.635442729079788e-06,
"loss": 0.4806,
"step": 304
},
{
"epoch": 1.4373040752351098,
"grad_norm": 0.5165356993675232,
"learning_rate": 4.6319504788505956e-06,
"loss": 0.4775,
"step": 305
},
{
"epoch": 1.4420062695924765,
"grad_norm": 0.5092841386795044,
"learning_rate": 4.628442909940325e-06,
"loss": 0.4892,
"step": 306
},
{
"epoch": 1.4467084639498433,
"grad_norm": 0.511424720287323,
"learning_rate": 4.624920047551874e-06,
"loss": 0.506,
"step": 307
},
{
"epoch": 1.4514106583072102,
"grad_norm": 0.5631566643714905,
"learning_rate": 4.621381916998029e-06,
"loss": 0.4741,
"step": 308
},
{
"epoch": 1.4561128526645768,
"grad_norm": 0.4748315215110779,
"learning_rate": 4.6178285437012806e-06,
"loss": 0.5084,
"step": 309
},
{
"epoch": 1.4608150470219436,
"grad_norm": 0.47158119082450867,
"learning_rate": 4.6142599531936435e-06,
"loss": 0.4697,
"step": 310
},
{
"epoch": 1.4655172413793103,
"grad_norm": 0.5358107089996338,
"learning_rate": 4.610676171116475e-06,
"loss": 0.491,
"step": 311
},
{
"epoch": 1.4702194357366771,
"grad_norm": 0.47717440128326416,
"learning_rate": 4.607077223220286e-06,
"loss": 0.4948,
"step": 312
},
{
"epoch": 1.4749216300940438,
"grad_norm": 0.5041193962097168,
"learning_rate": 4.603463135364556e-06,
"loss": 0.4648,
"step": 313
},
{
"epoch": 1.4796238244514106,
"grad_norm": 0.9311274290084839,
"learning_rate": 4.5998339335175555e-06,
"loss": 0.4866,
"step": 314
},
{
"epoch": 1.4843260188087775,
"grad_norm": 0.47408604621887207,
"learning_rate": 4.596189643756147e-06,
"loss": 0.4634,
"step": 315
},
{
"epoch": 1.489028213166144,
"grad_norm": 0.5052632093429565,
"learning_rate": 4.592530292265609e-06,
"loss": 0.4843,
"step": 316
},
{
"epoch": 1.493730407523511,
"grad_norm": 0.5100846886634827,
"learning_rate": 4.58885590533944e-06,
"loss": 0.4942,
"step": 317
},
{
"epoch": 1.4984326018808778,
"grad_norm": 0.5132214426994324,
"learning_rate": 4.585166509379173e-06,
"loss": 0.5135,
"step": 318
},
{
"epoch": 1.5031347962382444,
"grad_norm": 11.112855911254883,
"learning_rate": 4.581462130894186e-06,
"loss": 0.4933,
"step": 319
},
{
"epoch": 1.5078369905956113,
"grad_norm": 0.4873805642127991,
"learning_rate": 4.57774279650151e-06,
"loss": 0.483,
"step": 320
},
{
"epoch": 1.5125391849529781,
"grad_norm": 0.5026459693908691,
"learning_rate": 4.574008532925638e-06,
"loss": 0.5075,
"step": 321
},
{
"epoch": 1.5172413793103448,
"grad_norm": 0.489947110414505,
"learning_rate": 4.570259366998336e-06,
"loss": 0.4954,
"step": 322
},
{
"epoch": 1.5219435736677116,
"grad_norm": 0.48120853304862976,
"learning_rate": 4.566495325658445e-06,
"loss": 0.5221,
"step": 323
},
{
"epoch": 1.5266457680250785,
"grad_norm": 0.4880066514015198,
"learning_rate": 4.5627164359516915e-06,
"loss": 0.5031,
"step": 324
},
{
"epoch": 1.531347962382445,
"grad_norm": 0.5048410892486572,
"learning_rate": 4.558922725030491e-06,
"loss": 0.4757,
"step": 325
},
{
"epoch": 1.536050156739812,
"grad_norm": 0.7033756375312805,
"learning_rate": 4.555114220153755e-06,
"loss": 0.4285,
"step": 326
},
{
"epoch": 1.5407523510971788,
"grad_norm": 0.4716516435146332,
"learning_rate": 4.551290948686693e-06,
"loss": 0.5121,
"step": 327
},
{
"epoch": 1.5454545454545454,
"grad_norm": 0.4782696068286896,
"learning_rate": 4.547452938100615e-06,
"loss": 0.5176,
"step": 328
},
{
"epoch": 1.5501567398119123,
"grad_norm": 0.5119273066520691,
"learning_rate": 4.54360021597274e-06,
"loss": 0.4941,
"step": 329
},
{
"epoch": 1.5548589341692791,
"grad_norm": 0.5010069608688354,
"learning_rate": 4.539732809985989e-06,
"loss": 0.4862,
"step": 330
},
{
"epoch": 1.5595611285266457,
"grad_norm": 0.5129932165145874,
"learning_rate": 4.535850747928796e-06,
"loss": 0.4978,
"step": 331
},
{
"epoch": 1.5642633228840124,
"grad_norm": 0.4957594573497772,
"learning_rate": 4.531954057694897e-06,
"loss": 0.4814,
"step": 332
},
{
"epoch": 1.5689655172413794,
"grad_norm": 0.5642824172973633,
"learning_rate": 4.5280427672831414e-06,
"loss": 0.4888,
"step": 333
},
{
"epoch": 1.573667711598746,
"grad_norm": 0.4562854468822479,
"learning_rate": 4.524116904797281e-06,
"loss": 0.4648,
"step": 334
},
{
"epoch": 1.5783699059561127,
"grad_norm": 0.4849218428134918,
"learning_rate": 4.520176498445774e-06,
"loss": 0.476,
"step": 335
},
{
"epoch": 1.5830721003134798,
"grad_norm": 0.5046947002410889,
"learning_rate": 4.516221576541581e-06,
"loss": 0.4776,
"step": 336
},
{
"epoch": 1.5877742946708464,
"grad_norm": 0.48211777210235596,
"learning_rate": 4.512252167501959e-06,
"loss": 0.479,
"step": 337
},
{
"epoch": 1.592476489028213,
"grad_norm": 0.4812171459197998,
"learning_rate": 4.508268299848262e-06,
"loss": 0.4849,
"step": 338
},
{
"epoch": 1.59717868338558,
"grad_norm": 0.5865142345428467,
"learning_rate": 4.50427000220573e-06,
"loss": 0.499,
"step": 339
},
{
"epoch": 1.6018808777429467,
"grad_norm": 0.49277785420417786,
"learning_rate": 4.50025730330329e-06,
"loss": 0.475,
"step": 340
},
{
"epoch": 1.6065830721003134,
"grad_norm": 0.46771496534347534,
"learning_rate": 4.4962302319733445e-06,
"loss": 0.494,
"step": 341
},
{
"epoch": 1.6112852664576802,
"grad_norm": 0.5189441442489624,
"learning_rate": 4.492188817151565e-06,
"loss": 0.5275,
"step": 342
},
{
"epoch": 1.615987460815047,
"grad_norm": 0.48845574259757996,
"learning_rate": 4.488133087876688e-06,
"loss": 0.4676,
"step": 343
},
{
"epoch": 1.6206896551724137,
"grad_norm": 0.47189632058143616,
"learning_rate": 4.484063073290301e-06,
"loss": 0.4642,
"step": 344
},
{
"epoch": 1.6253918495297806,
"grad_norm": 0.5442587733268738,
"learning_rate": 4.479978802636637e-06,
"loss": 0.4981,
"step": 345
},
{
"epoch": 1.6300940438871474,
"grad_norm": 0.5048685073852539,
"learning_rate": 4.475880305262362e-06,
"loss": 0.5037,
"step": 346
},
{
"epoch": 1.634796238244514,
"grad_norm": 0.4781409800052643,
"learning_rate": 4.471767610616366e-06,
"loss": 0.4932,
"step": 347
},
{
"epoch": 1.6394984326018809,
"grad_norm": 0.47388938069343567,
"learning_rate": 4.467640748249549e-06,
"loss": 0.4687,
"step": 348
},
{
"epoch": 1.6442006269592477,
"grad_norm": 0.529712438583374,
"learning_rate": 4.4634997478146125e-06,
"loss": 0.487,
"step": 349
},
{
"epoch": 1.6489028213166144,
"grad_norm": 0.5114791393280029,
"learning_rate": 4.459344639065842e-06,
"loss": 0.4809,
"step": 350
},
{
"epoch": 1.6536050156739812,
"grad_norm": 0.45415258407592773,
"learning_rate": 4.455175451858897e-06,
"loss": 0.4901,
"step": 351
},
{
"epoch": 1.658307210031348,
"grad_norm": 0.5842339396476746,
"learning_rate": 4.450992216150592e-06,
"loss": 0.499,
"step": 352
},
{
"epoch": 1.6630094043887147,
"grad_norm": 0.48795560002326965,
"learning_rate": 4.446794961998689e-06,
"loss": 0.4659,
"step": 353
},
{
"epoch": 1.6677115987460815,
"grad_norm": 0.5531855225563049,
"learning_rate": 4.442583719561671e-06,
"loss": 0.4923,
"step": 354
},
{
"epoch": 1.6724137931034484,
"grad_norm": 0.5827644467353821,
"learning_rate": 4.438358519098536e-06,
"loss": 0.4991,
"step": 355
},
{
"epoch": 1.677115987460815,
"grad_norm": 0.5260423421859741,
"learning_rate": 4.4341193909685685e-06,
"loss": 0.4843,
"step": 356
},
{
"epoch": 1.6818181818181817,
"grad_norm": 0.4969344437122345,
"learning_rate": 4.429866365631134e-06,
"loss": 0.4915,
"step": 357
},
{
"epoch": 1.6865203761755487,
"grad_norm": 0.4725005030632019,
"learning_rate": 4.425599473645447e-06,
"loss": 0.4804,
"step": 358
},
{
"epoch": 1.6912225705329154,
"grad_norm": 0.47171467542648315,
"learning_rate": 4.421318745670364e-06,
"loss": 0.4823,
"step": 359
},
{
"epoch": 1.695924764890282,
"grad_norm": 0.4839799106121063,
"learning_rate": 4.4170242124641524e-06,
"loss": 0.4585,
"step": 360
},
{
"epoch": 1.700626959247649,
"grad_norm": 0.4786856472492218,
"learning_rate": 4.412715904884277e-06,
"loss": 0.49,
"step": 361
},
{
"epoch": 1.7053291536050157,
"grad_norm": 0.49980080127716064,
"learning_rate": 4.4083938538871735e-06,
"loss": 0.4675,
"step": 362
},
{
"epoch": 1.7100313479623823,
"grad_norm": 0.5201369524002075,
"learning_rate": 4.4040580905280295e-06,
"loss": 0.4862,
"step": 363
},
{
"epoch": 1.7147335423197492,
"grad_norm": 0.7051575183868408,
"learning_rate": 4.3997086459605586e-06,
"loss": 0.4822,
"step": 364
},
{
"epoch": 1.719435736677116,
"grad_norm": 0.48206666111946106,
"learning_rate": 4.395345551436779e-06,
"loss": 0.5076,
"step": 365
},
{
"epoch": 1.7241379310344827,
"grad_norm": 0.4817257821559906,
"learning_rate": 4.390968838306788e-06,
"loss": 0.4623,
"step": 366
},
{
"epoch": 1.7288401253918495,
"grad_norm": 0.5547840595245361,
"learning_rate": 4.386578538018535e-06,
"loss": 0.461,
"step": 367
},
{
"epoch": 1.7335423197492164,
"grad_norm": 0.5085346698760986,
"learning_rate": 4.382174682117598e-06,
"loss": 0.5068,
"step": 368
},
{
"epoch": 1.738244514106583,
"grad_norm": 0.4870692193508148,
"learning_rate": 4.377757302246956e-06,
"loss": 0.4403,
"step": 369
},
{
"epoch": 1.7429467084639498,
"grad_norm": 0.49482715129852295,
"learning_rate": 4.373326430146762e-06,
"loss": 0.4986,
"step": 370
},
{
"epoch": 1.7476489028213167,
"grad_norm": 0.5474854707717896,
"learning_rate": 4.368882097654113e-06,
"loss": 0.4938,
"step": 371
},
{
"epoch": 1.7523510971786833,
"grad_norm": 0.5055244565010071,
"learning_rate": 4.364424336702825e-06,
"loss": 0.4711,
"step": 372
},
{
"epoch": 1.7570532915360502,
"grad_norm": 0.48241329193115234,
"learning_rate": 4.3599531793232e-06,
"loss": 0.4856,
"step": 373
},
{
"epoch": 1.761755485893417,
"grad_norm": 0.4932602047920227,
"learning_rate": 4.355468657641797e-06,
"loss": 0.4818,
"step": 374
},
{
"epoch": 1.7664576802507836,
"grad_norm": 0.5512160658836365,
"learning_rate": 4.3509708038812035e-06,
"loss": 0.4864,
"step": 375
},
{
"epoch": 1.7711598746081505,
"grad_norm": 0.47026327252388,
"learning_rate": 4.346459650359798e-06,
"loss": 0.4825,
"step": 376
},
{
"epoch": 1.7758620689655173,
"grad_norm": 0.4831086993217468,
"learning_rate": 4.341935229491525e-06,
"loss": 0.4541,
"step": 377
},
{
"epoch": 1.780564263322884,
"grad_norm": 0.5045217871665955,
"learning_rate": 4.337397573785659e-06,
"loss": 0.5025,
"step": 378
},
{
"epoch": 1.7852664576802508,
"grad_norm": 0.5657753348350525,
"learning_rate": 4.332846715846566e-06,
"loss": 0.4698,
"step": 379
},
{
"epoch": 1.7899686520376177,
"grad_norm": 0.49546748399734497,
"learning_rate": 4.328282688373479e-06,
"loss": 0.4911,
"step": 380
},
{
"epoch": 1.7946708463949843,
"grad_norm": 0.5037291049957275,
"learning_rate": 4.323705524160258e-06,
"loss": 0.4877,
"step": 381
},
{
"epoch": 1.799373040752351,
"grad_norm": 0.5256901383399963,
"learning_rate": 4.319115256095149e-06,
"loss": 0.4662,
"step": 382
},
{
"epoch": 1.804075235109718,
"grad_norm": 0.4890702962875366,
"learning_rate": 4.314511917160557e-06,
"loss": 0.4683,
"step": 383
},
{
"epoch": 1.8087774294670846,
"grad_norm": 0.4724109470844269,
"learning_rate": 4.3098955404328045e-06,
"loss": 0.4602,
"step": 384
},
{
"epoch": 1.8134796238244513,
"grad_norm": 0.4933278560638428,
"learning_rate": 4.305266159081895e-06,
"loss": 0.4806,
"step": 385
},
{
"epoch": 1.8181818181818183,
"grad_norm": 0.5068219304084778,
"learning_rate": 4.3006238063712725e-06,
"loss": 0.4647,
"step": 386
},
{
"epoch": 1.822884012539185,
"grad_norm": 0.5293509364128113,
"learning_rate": 4.295968515657583e-06,
"loss": 0.4998,
"step": 387
},
{
"epoch": 1.8275862068965516,
"grad_norm": 0.4775199294090271,
"learning_rate": 4.29130032039044e-06,
"loss": 0.4821,
"step": 388
},
{
"epoch": 1.8322884012539185,
"grad_norm": 0.4914006292819977,
"learning_rate": 4.2866192541121755e-06,
"loss": 0.4735,
"step": 389
},
{
"epoch": 1.8369905956112853,
"grad_norm": 0.5009908080101013,
"learning_rate": 4.281925350457606e-06,
"loss": 0.4741,
"step": 390
},
{
"epoch": 1.841692789968652,
"grad_norm": 0.47211164236068726,
"learning_rate": 4.277218643153787e-06,
"loss": 0.4786,
"step": 391
},
{
"epoch": 1.8463949843260188,
"grad_norm": 1.9644113779067993,
"learning_rate": 4.272499166019771e-06,
"loss": 0.4759,
"step": 392
},
{
"epoch": 1.8510971786833856,
"grad_norm": 0.535971999168396,
"learning_rate": 4.267766952966369e-06,
"loss": 0.4665,
"step": 393
},
{
"epoch": 1.8557993730407523,
"grad_norm": 0.4666787385940552,
"learning_rate": 4.2630220379959006e-06,
"loss": 0.4417,
"step": 394
},
{
"epoch": 1.8605015673981191,
"grad_norm": 0.5976264476776123,
"learning_rate": 4.258264455201953e-06,
"loss": 0.4665,
"step": 395
},
{
"epoch": 1.865203761755486,
"grad_norm": 0.4814331531524658,
"learning_rate": 4.2534942387691335e-06,
"loss": 0.4896,
"step": 396
},
{
"epoch": 1.8699059561128526,
"grad_norm": 0.4929859936237335,
"learning_rate": 4.248711422972829e-06,
"loss": 0.4765,
"step": 397
},
{
"epoch": 1.8746081504702194,
"grad_norm": 0.517914354801178,
"learning_rate": 4.243916042178954e-06,
"loss": 0.4601,
"step": 398
},
{
"epoch": 1.8793103448275863,
"grad_norm": 0.47731271386146545,
"learning_rate": 4.239108130843709e-06,
"loss": 0.469,
"step": 399
},
{
"epoch": 1.884012539184953,
"grad_norm": 0.4939954876899719,
"learning_rate": 4.234287723513326e-06,
"loss": 0.4929,
"step": 400
},
{
"epoch": 1.8887147335423198,
"grad_norm": 0.48573923110961914,
"learning_rate": 4.229454854823827e-06,
"loss": 0.4913,
"step": 401
},
{
"epoch": 1.8934169278996866,
"grad_norm": 0.5146409273147583,
"learning_rate": 4.224609559500772e-06,
"loss": 0.502,
"step": 402
},
{
"epoch": 1.8981191222570533,
"grad_norm": 0.4884675443172455,
"learning_rate": 4.21975187235901e-06,
"loss": 0.4541,
"step": 403
},
{
"epoch": 1.90282131661442,
"grad_norm": 0.4871810972690582,
"learning_rate": 4.21488182830243e-06,
"loss": 0.4811,
"step": 404
},
{
"epoch": 1.907523510971787,
"grad_norm": 0.5089552402496338,
"learning_rate": 4.209999462323706e-06,
"loss": 0.4584,
"step": 405
},
{
"epoch": 1.9122257053291536,
"grad_norm": 0.6191231608390808,
"learning_rate": 4.20510480950405e-06,
"loss": 0.4885,
"step": 406
},
{
"epoch": 1.9169278996865202,
"grad_norm": 0.5512096285820007,
"learning_rate": 4.200197905012961e-06,
"loss": 0.4529,
"step": 407
},
{
"epoch": 1.9216300940438873,
"grad_norm": 0.4743112027645111,
"learning_rate": 4.195278784107965e-06,
"loss": 0.4702,
"step": 408
},
{
"epoch": 1.926332288401254,
"grad_norm": 0.4635118544101715,
"learning_rate": 4.19034748213437e-06,
"loss": 0.4718,
"step": 409
},
{
"epoch": 1.9310344827586206,
"grad_norm": 0.48715919256210327,
"learning_rate": 4.185404034525008e-06,
"loss": 0.4638,
"step": 410
},
{
"epoch": 1.9357366771159876,
"grad_norm": 0.5373724102973938,
"learning_rate": 4.180448476799981e-06,
"loss": 0.5009,
"step": 411
},
{
"epoch": 1.9404388714733543,
"grad_norm": 0.4978715479373932,
"learning_rate": 4.175480844566404e-06,
"loss": 0.4726,
"step": 412
},
{
"epoch": 1.9451410658307209,
"grad_norm": 0.44817060232162476,
"learning_rate": 4.170501173518152e-06,
"loss": 0.4683,
"step": 413
},
{
"epoch": 1.9498432601880877,
"grad_norm": 0.48472973704338074,
"learning_rate": 4.165509499435604e-06,
"loss": 0.4662,
"step": 414
},
{
"epoch": 1.9545454545454546,
"grad_norm": 0.6567174792289734,
"learning_rate": 4.16050585818538e-06,
"loss": 0.4801,
"step": 415
},
{
"epoch": 1.9592476489028212,
"grad_norm": 0.5131425857543945,
"learning_rate": 4.155490285720092e-06,
"loss": 0.5036,
"step": 416
},
{
"epoch": 1.963949843260188,
"grad_norm": 0.46051982045173645,
"learning_rate": 4.150462818078079e-06,
"loss": 0.4911,
"step": 417
},
{
"epoch": 1.968652037617555,
"grad_norm": 0.5288883447647095,
"learning_rate": 4.145423491383153e-06,
"loss": 0.4871,
"step": 418
},
{
"epoch": 1.9733542319749215,
"grad_norm": 0.5143817663192749,
"learning_rate": 4.14037234184433e-06,
"loss": 0.5027,
"step": 419
},
{
"epoch": 1.9780564263322884,
"grad_norm": 0.46323707699775696,
"learning_rate": 4.135309405755583e-06,
"loss": 0.4876,
"step": 420
},
{
"epoch": 1.9827586206896552,
"grad_norm": 0.5239706039428711,
"learning_rate": 4.130234719495574e-06,
"loss": 0.4702,
"step": 421
},
{
"epoch": 1.9874608150470219,
"grad_norm": 0.538753867149353,
"learning_rate": 4.125148319527391e-06,
"loss": 0.4638,
"step": 422
},
{
"epoch": 1.9921630094043887,
"grad_norm": 0.5180181860923767,
"learning_rate": 4.1200502423982904e-06,
"loss": 0.4841,
"step": 423
},
{
"epoch": 1.9968652037617556,
"grad_norm": 0.6698167324066162,
"learning_rate": 4.1149405247394295e-06,
"loss": 0.4882,
"step": 424
}
],
"logging_steps": 1,
"max_steps": 1272,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 212,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.7298140993196392e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}