{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.994236311239193,
"eval_steps": 500,
"global_step": 690,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008645533141210375,
"grad_norm": 34.72848892211914,
"learning_rate": 5.0000000000000004e-08,
"loss": 2.477,
"step": 1
},
{
"epoch": 0.01729106628242075,
"grad_norm": 33.733909606933594,
"learning_rate": 1.0000000000000001e-07,
"loss": 2.4134,
"step": 2
},
{
"epoch": 0.025936599423631124,
"grad_norm": 34.543819427490234,
"learning_rate": 1.5000000000000002e-07,
"loss": 2.4467,
"step": 3
},
{
"epoch": 0.0345821325648415,
"grad_norm": 35.37831115722656,
"learning_rate": 2.0000000000000002e-07,
"loss": 2.4801,
"step": 4
},
{
"epoch": 0.043227665706051875,
"grad_norm": 33.97856140136719,
"learning_rate": 2.5000000000000004e-07,
"loss": 2.4422,
"step": 5
},
{
"epoch": 0.05187319884726225,
"grad_norm": 34.11160659790039,
"learning_rate": 3.0000000000000004e-07,
"loss": 2.4003,
"step": 6
},
{
"epoch": 0.06051873198847262,
"grad_norm": 34.086463928222656,
"learning_rate": 3.5000000000000004e-07,
"loss": 2.4211,
"step": 7
},
{
"epoch": 0.069164265129683,
"grad_norm": 33.96665573120117,
"learning_rate": 4.0000000000000003e-07,
"loss": 2.4054,
"step": 8
},
{
"epoch": 0.07780979827089338,
"grad_norm": 34.871307373046875,
"learning_rate": 4.5000000000000003e-07,
"loss": 2.4077,
"step": 9
},
{
"epoch": 0.08645533141210375,
"grad_norm": 33.91160583496094,
"learning_rate": 5.000000000000001e-07,
"loss": 2.3598,
"step": 10
},
{
"epoch": 0.09510086455331412,
"grad_norm": 33.40217971801758,
"learning_rate": 5.5e-07,
"loss": 2.3209,
"step": 11
},
{
"epoch": 0.1037463976945245,
"grad_norm": 33.771121978759766,
"learning_rate": 6.000000000000001e-07,
"loss": 2.3178,
"step": 12
},
{
"epoch": 0.11239193083573487,
"grad_norm": 33.577476501464844,
"learning_rate": 6.5e-07,
"loss": 2.3107,
"step": 13
},
{
"epoch": 0.12103746397694524,
"grad_norm": 31.330514907836914,
"learning_rate": 7.000000000000001e-07,
"loss": 2.095,
"step": 14
},
{
"epoch": 0.12968299711815562,
"grad_norm": 33.60646057128906,
"learning_rate": 7.5e-07,
"loss": 2.1663,
"step": 15
},
{
"epoch": 0.138328530259366,
"grad_norm": 31.96607208251953,
"learning_rate": 8.000000000000001e-07,
"loss": 1.9986,
"step": 16
},
{
"epoch": 0.14697406340057637,
"grad_norm": 32.33183288574219,
"learning_rate": 8.500000000000001e-07,
"loss": 1.9401,
"step": 17
},
{
"epoch": 0.15561959654178675,
"grad_norm": 33.50197219848633,
"learning_rate": 9.000000000000001e-07,
"loss": 1.8529,
"step": 18
},
{
"epoch": 0.1642651296829971,
"grad_norm": 33.55098342895508,
"learning_rate": 9.500000000000001e-07,
"loss": 1.7087,
"step": 19
},
{
"epoch": 0.1729106628242075,
"grad_norm": 33.90129852294922,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.6324,
"step": 20
},
{
"epoch": 0.18155619596541786,
"grad_norm": 33.738037109375,
"learning_rate": 1.0500000000000001e-06,
"loss": 1.4896,
"step": 21
},
{
"epoch": 0.19020172910662825,
"grad_norm": 33.6258659362793,
"learning_rate": 1.1e-06,
"loss": 1.3664,
"step": 22
},
{
"epoch": 0.1988472622478386,
"grad_norm": 30.822349548339844,
"learning_rate": 1.1500000000000002e-06,
"loss": 1.2193,
"step": 23
},
{
"epoch": 0.207492795389049,
"grad_norm": 29.608501434326172,
"learning_rate": 1.2000000000000002e-06,
"loss": 1.074,
"step": 24
},
{
"epoch": 0.21613832853025935,
"grad_norm": 27.651105880737305,
"learning_rate": 1.25e-06,
"loss": 0.938,
"step": 25
},
{
"epoch": 0.22478386167146974,
"grad_norm": 29.479637145996094,
"learning_rate": 1.3e-06,
"loss": 0.7728,
"step": 26
},
{
"epoch": 0.2334293948126801,
"grad_norm": 29.068634033203125,
"learning_rate": 1.3500000000000002e-06,
"loss": 0.6051,
"step": 27
},
{
"epoch": 0.2420749279538905,
"grad_norm": 24.850099563598633,
"learning_rate": 1.4000000000000001e-06,
"loss": 0.4463,
"step": 28
},
{
"epoch": 0.2507204610951009,
"grad_norm": 22.095216751098633,
"learning_rate": 1.45e-06,
"loss": 0.3489,
"step": 29
},
{
"epoch": 0.25936599423631124,
"grad_norm": 19.491201400756836,
"learning_rate": 1.5e-06,
"loss": 0.261,
"step": 30
},
{
"epoch": 0.2680115273775216,
"grad_norm": 14.492341041564941,
"learning_rate": 1.5500000000000002e-06,
"loss": 0.1707,
"step": 31
},
{
"epoch": 0.276657060518732,
"grad_norm": 6.020577907562256,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.1151,
"step": 32
},
{
"epoch": 0.28530259365994237,
"grad_norm": 2.90791916847229,
"learning_rate": 1.6500000000000003e-06,
"loss": 0.0976,
"step": 33
},
{
"epoch": 0.29394812680115273,
"grad_norm": 2.637803554534912,
"learning_rate": 1.7000000000000002e-06,
"loss": 0.0911,
"step": 34
},
{
"epoch": 0.3025936599423631,
"grad_norm": 1.804861068725586,
"learning_rate": 1.75e-06,
"loss": 0.0834,
"step": 35
},
{
"epoch": 0.3112391930835735,
"grad_norm": 2.049024820327759,
"learning_rate": 1.8000000000000001e-06,
"loss": 0.0842,
"step": 36
},
{
"epoch": 0.31988472622478387,
"grad_norm": 1.3263498544692993,
"learning_rate": 1.85e-06,
"loss": 0.0744,
"step": 37
},
{
"epoch": 0.3285302593659942,
"grad_norm": 1.7187089920043945,
"learning_rate": 1.9000000000000002e-06,
"loss": 0.0783,
"step": 38
},
{
"epoch": 0.3371757925072046,
"grad_norm": 1.3925131559371948,
"learning_rate": 1.9500000000000004e-06,
"loss": 0.073,
"step": 39
},
{
"epoch": 0.345821325648415,
"grad_norm": 1.2181739807128906,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.0749,
"step": 40
},
{
"epoch": 0.35446685878962536,
"grad_norm": 1.0519245862960815,
"learning_rate": 2.05e-06,
"loss": 0.0692,
"step": 41
},
{
"epoch": 0.3631123919308357,
"grad_norm": 0.9188923835754395,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.0726,
"step": 42
},
{
"epoch": 0.37175792507204614,
"grad_norm": 0.8273228406906128,
"learning_rate": 2.15e-06,
"loss": 0.0651,
"step": 43
},
{
"epoch": 0.3804034582132565,
"grad_norm": 0.9098994135856628,
"learning_rate": 2.2e-06,
"loss": 0.066,
"step": 44
},
{
"epoch": 0.38904899135446686,
"grad_norm": 0.8456838726997375,
"learning_rate": 2.25e-06,
"loss": 0.0646,
"step": 45
},
{
"epoch": 0.3976945244956772,
"grad_norm": 0.8240940570831299,
"learning_rate": 2.3000000000000004e-06,
"loss": 0.0604,
"step": 46
},
{
"epoch": 0.40634005763688763,
"grad_norm": 1.111759901046753,
"learning_rate": 2.35e-06,
"loss": 0.0589,
"step": 47
},
{
"epoch": 0.414985590778098,
"grad_norm": 0.9933035373687744,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.0613,
"step": 48
},
{
"epoch": 0.42363112391930835,
"grad_norm": 0.7491716742515564,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.0583,
"step": 49
},
{
"epoch": 0.4322766570605187,
"grad_norm": 0.9089523553848267,
"learning_rate": 2.5e-06,
"loss": 0.0558,
"step": 50
},
{
"epoch": 0.4409221902017291,
"grad_norm": 0.7088611721992493,
"learning_rate": 2.55e-06,
"loss": 0.0553,
"step": 51
},
{
"epoch": 0.4495677233429395,
"grad_norm": 0.7892571091651917,
"learning_rate": 2.6e-06,
"loss": 0.064,
"step": 52
},
{
"epoch": 0.45821325648414984,
"grad_norm": 0.9248467087745667,
"learning_rate": 2.6500000000000005e-06,
"loss": 0.0653,
"step": 53
},
{
"epoch": 0.4668587896253602,
"grad_norm": 0.7224969863891602,
"learning_rate": 2.7000000000000004e-06,
"loss": 0.0611,
"step": 54
},
{
"epoch": 0.4755043227665706,
"grad_norm": 0.8231533765792847,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.0617,
"step": 55
},
{
"epoch": 0.484149855907781,
"grad_norm": 0.7306967973709106,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.0571,
"step": 56
},
{
"epoch": 0.49279538904899134,
"grad_norm": 0.8415323495864868,
"learning_rate": 2.85e-06,
"loss": 0.0548,
"step": 57
},
{
"epoch": 0.5014409221902018,
"grad_norm": 1.5560295581817627,
"learning_rate": 2.9e-06,
"loss": 0.0672,
"step": 58
},
{
"epoch": 0.5100864553314121,
"grad_norm": 0.9170955419540405,
"learning_rate": 2.95e-06,
"loss": 0.068,
"step": 59
},
{
"epoch": 0.5187319884726225,
"grad_norm": 0.6508005857467651,
"learning_rate": 3e-06,
"loss": 0.057,
"step": 60
},
{
"epoch": 0.5273775216138329,
"grad_norm": 0.8307355642318726,
"learning_rate": 3.05e-06,
"loss": 0.0598,
"step": 61
},
{
"epoch": 0.5360230547550432,
"grad_norm": 1.11078679561615,
"learning_rate": 3.1000000000000004e-06,
"loss": 0.0575,
"step": 62
},
{
"epoch": 0.5446685878962536,
"grad_norm": 1.0765758752822876,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.0615,
"step": 63
},
{
"epoch": 0.553314121037464,
"grad_norm": 0.8381508588790894,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.0606,
"step": 64
},
{
"epoch": 0.5619596541786743,
"grad_norm": 1.007628321647644,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.0572,
"step": 65
},
{
"epoch": 0.5706051873198847,
"grad_norm": 0.7254197597503662,
"learning_rate": 3.3000000000000006e-06,
"loss": 0.0543,
"step": 66
},
{
"epoch": 0.579250720461095,
"grad_norm": 0.5906903147697449,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.0557,
"step": 67
},
{
"epoch": 0.5878962536023055,
"grad_norm": 0.6791537404060364,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.0516,
"step": 68
},
{
"epoch": 0.5965417867435159,
"grad_norm": 0.6390945315361023,
"learning_rate": 3.45e-06,
"loss": 0.0529,
"step": 69
},
{
"epoch": 0.6051873198847262,
"grad_norm": 0.61552494764328,
"learning_rate": 3.5e-06,
"loss": 0.0554,
"step": 70
},
{
"epoch": 0.6138328530259366,
"grad_norm": 0.6545206308364868,
"learning_rate": 3.5500000000000003e-06,
"loss": 0.0545,
"step": 71
},
{
"epoch": 0.622478386167147,
"grad_norm": 0.9062793254852295,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.0559,
"step": 72
},
{
"epoch": 0.6311239193083573,
"grad_norm": 0.9642562866210938,
"learning_rate": 3.65e-06,
"loss": 0.0485,
"step": 73
},
{
"epoch": 0.6397694524495677,
"grad_norm": 0.7286660075187683,
"learning_rate": 3.7e-06,
"loss": 0.0564,
"step": 74
},
{
"epoch": 0.6484149855907781,
"grad_norm": 0.6356053948402405,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.053,
"step": 75
},
{
"epoch": 0.6570605187319885,
"grad_norm": 0.706794261932373,
"learning_rate": 3.8000000000000005e-06,
"loss": 0.0531,
"step": 76
},
{
"epoch": 0.6657060518731989,
"grad_norm": 0.6616448163986206,
"learning_rate": 3.85e-06,
"loss": 0.0504,
"step": 77
},
{
"epoch": 0.6743515850144092,
"grad_norm": 0.7465748190879822,
"learning_rate": 3.900000000000001e-06,
"loss": 0.0495,
"step": 78
},
{
"epoch": 0.6829971181556196,
"grad_norm": 0.8153467774391174,
"learning_rate": 3.95e-06,
"loss": 0.0495,
"step": 79
},
{
"epoch": 0.69164265129683,
"grad_norm": 0.7728897333145142,
"learning_rate": 4.000000000000001e-06,
"loss": 0.0434,
"step": 80
},
{
"epoch": 0.7002881844380403,
"grad_norm": 0.6173391938209534,
"learning_rate": 4.05e-06,
"loss": 0.0432,
"step": 81
},
{
"epoch": 0.7089337175792507,
"grad_norm": 0.7128047943115234,
"learning_rate": 4.1e-06,
"loss": 0.0512,
"step": 82
},
{
"epoch": 0.7175792507204611,
"grad_norm": 0.6098653674125671,
"learning_rate": 4.15e-06,
"loss": 0.0415,
"step": 83
},
{
"epoch": 0.7262247838616714,
"grad_norm": 0.7464293241500854,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.0453,
"step": 84
},
{
"epoch": 0.7348703170028819,
"grad_norm": 0.8350300788879395,
"learning_rate": 4.25e-06,
"loss": 0.0463,
"step": 85
},
{
"epoch": 0.7435158501440923,
"grad_norm": 0.7880110740661621,
"learning_rate": 4.3e-06,
"loss": 0.0426,
"step": 86
},
{
"epoch": 0.7521613832853026,
"grad_norm": 0.77886962890625,
"learning_rate": 4.350000000000001e-06,
"loss": 0.0476,
"step": 87
},
{
"epoch": 0.760806916426513,
"grad_norm": 1.0404386520385742,
"learning_rate": 4.4e-06,
"loss": 0.0501,
"step": 88
},
{
"epoch": 0.7694524495677233,
"grad_norm": 0.5827208757400513,
"learning_rate": 4.450000000000001e-06,
"loss": 0.0396,
"step": 89
},
{
"epoch": 0.7780979827089337,
"grad_norm": 0.5928618907928467,
"learning_rate": 4.5e-06,
"loss": 0.0438,
"step": 90
},
{
"epoch": 0.7867435158501441,
"grad_norm": 0.5311946272850037,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.0388,
"step": 91
},
{
"epoch": 0.7953890489913544,
"grad_norm": 0.7609073519706726,
"learning_rate": 4.600000000000001e-06,
"loss": 0.0403,
"step": 92
},
{
"epoch": 0.8040345821325648,
"grad_norm": 0.6055853962898254,
"learning_rate": 4.65e-06,
"loss": 0.0402,
"step": 93
},
{
"epoch": 0.8126801152737753,
"grad_norm": 0.8020023703575134,
"learning_rate": 4.7e-06,
"loss": 0.0415,
"step": 94
},
{
"epoch": 0.8213256484149856,
"grad_norm": 0.9083772301673889,
"learning_rate": 4.75e-06,
"loss": 0.0428,
"step": 95
},
{
"epoch": 0.829971181556196,
"grad_norm": 0.6658433079719543,
"learning_rate": 4.800000000000001e-06,
"loss": 0.0381,
"step": 96
},
{
"epoch": 0.8386167146974063,
"grad_norm": 0.925826907157898,
"learning_rate": 4.85e-06,
"loss": 0.0465,
"step": 97
},
{
"epoch": 0.8472622478386167,
"grad_norm": 0.5956787467002869,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.0429,
"step": 98
},
{
"epoch": 0.8559077809798271,
"grad_norm": 0.8485273718833923,
"learning_rate": 4.95e-06,
"loss": 0.0465,
"step": 99
},
{
"epoch": 0.8645533141210374,
"grad_norm": 0.7477124333381653,
"learning_rate": 5e-06,
"loss": 0.0407,
"step": 100
},
{
"epoch": 0.8731988472622478,
"grad_norm": 0.624204695224762,
"learning_rate": 4.999964559102694e-06,
"loss": 0.0433,
"step": 101
},
{
"epoch": 0.8818443804034583,
"grad_norm": 0.6230912804603577,
"learning_rate": 4.999858237415621e-06,
"loss": 0.0364,
"step": 102
},
{
"epoch": 0.8904899135446686,
"grad_norm": 0.9286472201347351,
"learning_rate": 4.999681037953289e-06,
"loss": 0.0385,
"step": 103
},
{
"epoch": 0.899135446685879,
"grad_norm": 1.0528490543365479,
"learning_rate": 4.999432965739786e-06,
"loss": 0.0406,
"step": 104
},
{
"epoch": 0.9077809798270894,
"grad_norm": 1.1465263366699219,
"learning_rate": 4.999114027808632e-06,
"loss": 0.0526,
"step": 105
},
{
"epoch": 0.9164265129682997,
"grad_norm": 0.7173194885253906,
"learning_rate": 4.998724233202585e-06,
"loss": 0.05,
"step": 106
},
{
"epoch": 0.9250720461095101,
"grad_norm": 0.7150623798370361,
"learning_rate": 4.998263592973382e-06,
"loss": 0.0392,
"step": 107
},
{
"epoch": 0.9337175792507204,
"grad_norm": 0.6749414801597595,
"learning_rate": 4.9977321201814235e-06,
"loss": 0.0381,
"step": 108
},
{
"epoch": 0.9423631123919308,
"grad_norm": 0.7057478427886963,
"learning_rate": 4.997129829895409e-06,
"loss": 0.0427,
"step": 109
},
{
"epoch": 0.9510086455331412,
"grad_norm": 0.8370860815048218,
"learning_rate": 4.996456739191905e-06,
"loss": 0.0353,
"step": 110
},
{
"epoch": 0.9596541786743515,
"grad_norm": 0.8583172559738159,
"learning_rate": 4.995712867154863e-06,
"loss": 0.0366,
"step": 111
},
{
"epoch": 0.968299711815562,
"grad_norm": 0.9564568400382996,
"learning_rate": 4.994898234875075e-06,
"loss": 0.0463,
"step": 112
},
{
"epoch": 0.9769452449567724,
"grad_norm": 0.7943828105926514,
"learning_rate": 4.9940128654495826e-06,
"loss": 0.0366,
"step": 113
},
{
"epoch": 0.9855907780979827,
"grad_norm": 0.9387117624282837,
"learning_rate": 4.9930567839810125e-06,
"loss": 0.0433,
"step": 114
},
{
"epoch": 0.9942363112391931,
"grad_norm": 0.5982036590576172,
"learning_rate": 4.992030017576876e-06,
"loss": 0.0311,
"step": 115
},
{
"epoch": 1.0086455331412103,
"grad_norm": 1.6264456510543823,
"learning_rate": 4.990932595348788e-06,
"loss": 0.079,
"step": 116
},
{
"epoch": 1.0172910662824208,
"grad_norm": 0.5183172225952148,
"learning_rate": 4.989764548411654e-06,
"loss": 0.0268,
"step": 117
},
{
"epoch": 1.0259365994236311,
"grad_norm": 0.5135197043418884,
"learning_rate": 4.988525909882779e-06,
"loss": 0.0281,
"step": 118
},
{
"epoch": 1.0345821325648414,
"grad_norm": 0.6001684665679932,
"learning_rate": 4.987216714880929e-06,
"loss": 0.0312,
"step": 119
},
{
"epoch": 1.043227665706052,
"grad_norm": 0.5347486138343811,
"learning_rate": 4.9858370005253435e-06,
"loss": 0.0264,
"step": 120
},
{
"epoch": 1.0518731988472623,
"grad_norm": 0.8435088396072388,
"learning_rate": 4.9843868059346725e-06,
"loss": 0.0296,
"step": 121
},
{
"epoch": 1.0605187319884726,
"grad_norm": 0.8622809052467346,
"learning_rate": 4.982866172225876e-06,
"loss": 0.0277,
"step": 122
},
{
"epoch": 1.069164265129683,
"grad_norm": 0.8440551161766052,
"learning_rate": 4.981275142513049e-06,
"loss": 0.0301,
"step": 123
},
{
"epoch": 1.0778097982708934,
"grad_norm": 0.6853605508804321,
"learning_rate": 4.979613761906212e-06,
"loss": 0.0264,
"step": 124
},
{
"epoch": 1.0864553314121037,
"grad_norm": 0.7620036602020264,
"learning_rate": 4.977882077510018e-06,
"loss": 0.0281,
"step": 125
},
{
"epoch": 1.0951008645533142,
"grad_norm": 0.9126653075218201,
"learning_rate": 4.9760801384224274e-06,
"loss": 0.0312,
"step": 126
},
{
"epoch": 1.1037463976945245,
"grad_norm": 0.6158509850502014,
"learning_rate": 4.97420799573331e-06,
"loss": 0.0263,
"step": 127
},
{
"epoch": 1.1123919308357348,
"grad_norm": 0.7442693114280701,
"learning_rate": 4.972265702523001e-06,
"loss": 0.0251,
"step": 128
},
{
"epoch": 1.1210374639769451,
"grad_norm": 0.5755389928817749,
"learning_rate": 4.970253313860788e-06,
"loss": 0.0276,
"step": 129
},
{
"epoch": 1.1296829971181557,
"grad_norm": 0.6565670967102051,
"learning_rate": 4.968170886803361e-06,
"loss": 0.029,
"step": 130
},
{
"epoch": 1.138328530259366,
"grad_norm": 0.6405811309814453,
"learning_rate": 4.966018480393189e-06,
"loss": 0.0293,
"step": 131
},
{
"epoch": 1.1469740634005763,
"grad_norm": 0.5925298929214478,
"learning_rate": 4.9637961556568405e-06,
"loss": 0.0264,
"step": 132
},
{
"epoch": 1.1556195965417868,
"grad_norm": 0.5921182632446289,
"learning_rate": 4.961503975603263e-06,
"loss": 0.0305,
"step": 133
},
{
"epoch": 1.1642651296829971,
"grad_norm": 0.6314800977706909,
"learning_rate": 4.959142005221991e-06,
"loss": 0.0242,
"step": 134
},
{
"epoch": 1.1729106628242074,
"grad_norm": 0.6051074862480164,
"learning_rate": 4.956710311481303e-06,
"loss": 0.0303,
"step": 135
},
{
"epoch": 1.181556195965418,
"grad_norm": 0.7119901776313782,
"learning_rate": 4.954208963326327e-06,
"loss": 0.023,
"step": 136
},
{
"epoch": 1.1902017291066282,
"grad_norm": 1.087274432182312,
"learning_rate": 4.951638031677081e-06,
"loss": 0.0222,
"step": 137
},
{
"epoch": 1.1988472622478386,
"grad_norm": 0.5598759651184082,
"learning_rate": 4.948997589426463e-06,
"loss": 0.0255,
"step": 138
},
{
"epoch": 1.207492795389049,
"grad_norm": 0.7533854246139526,
"learning_rate": 4.94628771143819e-06,
"loss": 0.0245,
"step": 139
},
{
"epoch": 1.2161383285302594,
"grad_norm": 0.7722812294960022,
"learning_rate": 4.943508474544667e-06,
"loss": 0.0237,
"step": 140
},
{
"epoch": 1.2247838616714697,
"grad_norm": 0.6604969501495361,
"learning_rate": 4.940659957544813e-06,
"loss": 0.02,
"step": 141
},
{
"epoch": 1.23342939481268,
"grad_norm": 0.5789263248443604,
"learning_rate": 4.937742241201826e-06,
"loss": 0.0263,
"step": 142
},
{
"epoch": 1.2420749279538905,
"grad_norm": 1.1343308687210083,
"learning_rate": 4.934755408240896e-06,
"loss": 0.0291,
"step": 143
},
{
"epoch": 1.2507204610951008,
"grad_norm": 0.7230944037437439,
"learning_rate": 4.931699543346854e-06,
"loss": 0.0245,
"step": 144
},
{
"epoch": 1.2593659942363113,
"grad_norm": 0.591327428817749,
"learning_rate": 4.928574733161775e-06,
"loss": 0.0241,
"step": 145
},
{
"epoch": 1.2680115273775217,
"grad_norm": 0.6604726910591125,
"learning_rate": 4.925381066282522e-06,
"loss": 0.0242,
"step": 146
},
{
"epoch": 1.276657060518732,
"grad_norm": 0.5772287845611572,
"learning_rate": 4.922118633258229e-06,
"loss": 0.0272,
"step": 147
},
{
"epoch": 1.2853025936599423,
"grad_norm": 0.7148783802986145,
"learning_rate": 4.918787526587739e-06,
"loss": 0.0196,
"step": 148
},
{
"epoch": 1.2939481268011528,
"grad_norm": 0.660778284072876,
"learning_rate": 4.9153878407169815e-06,
"loss": 0.0202,
"step": 149
},
{
"epoch": 1.302593659942363,
"grad_norm": 0.6156355738639832,
"learning_rate": 4.911919672036291e-06,
"loss": 0.0297,
"step": 150
},
{
"epoch": 1.3112391930835736,
"grad_norm": 0.6828760504722595,
"learning_rate": 4.908383118877672e-06,
"loss": 0.0235,
"step": 151
},
{
"epoch": 1.319884726224784,
"grad_norm": 0.7847033143043518,
"learning_rate": 4.904778281512022e-06,
"loss": 0.0278,
"step": 152
},
{
"epoch": 1.3285302593659942,
"grad_norm": 0.9113219976425171,
"learning_rate": 4.901105262146275e-06,
"loss": 0.0226,
"step": 153
},
{
"epoch": 1.3371757925072045,
"grad_norm": 0.5406802892684937,
"learning_rate": 4.897364164920515e-06,
"loss": 0.0205,
"step": 154
},
{
"epoch": 1.345821325648415,
"grad_norm": 0.7523823976516724,
"learning_rate": 4.8935550959050135e-06,
"loss": 0.0209,
"step": 155
},
{
"epoch": 1.3544668587896254,
"grad_norm": 0.7060834169387817,
"learning_rate": 4.889678163097233e-06,
"loss": 0.0217,
"step": 156
},
{
"epoch": 1.3631123919308357,
"grad_norm": 0.758028507232666,
"learning_rate": 4.885733476418752e-06,
"loss": 0.02,
"step": 157
},
{
"epoch": 1.3717579250720462,
"grad_norm": 0.7745693325996399,
"learning_rate": 4.8817211477121615e-06,
"loss": 0.0197,
"step": 158
},
{
"epoch": 1.3804034582132565,
"grad_norm": 0.665055513381958,
"learning_rate": 4.8776412907378845e-06,
"loss": 0.019,
"step": 159
},
{
"epoch": 1.3890489913544668,
"grad_norm": 0.6661068797111511,
"learning_rate": 4.8734940211709535e-06,
"loss": 0.0196,
"step": 160
},
{
"epoch": 1.397694524495677,
"grad_norm": 0.9675925374031067,
"learning_rate": 4.8692794565977335e-06,
"loss": 0.0171,
"step": 161
},
{
"epoch": 1.4063400576368876,
"grad_norm": 0.6929494738578796,
"learning_rate": 4.864997716512584e-06,
"loss": 0.0198,
"step": 162
},
{
"epoch": 1.414985590778098,
"grad_norm": 0.6929437518119812,
"learning_rate": 4.8606489223144744e-06,
"loss": 0.0152,
"step": 163
},
{
"epoch": 1.4236311239193085,
"grad_norm": 0.7960386276245117,
"learning_rate": 4.8562331973035396e-06,
"loss": 0.0177,
"step": 164
},
{
"epoch": 1.4322766570605188,
"grad_norm": 0.8672429919242859,
"learning_rate": 4.851750666677583e-06,
"loss": 0.0215,
"step": 165
},
{
"epoch": 1.440922190201729,
"grad_norm": 0.7653059959411621,
"learning_rate": 4.847201457528533e-06,
"loss": 0.0172,
"step": 166
},
{
"epoch": 1.4495677233429394,
"grad_norm": 0.8027440905570984,
"learning_rate": 4.842585698838832e-06,
"loss": 0.0145,
"step": 167
},
{
"epoch": 1.45821325648415,
"grad_norm": 0.6481142044067383,
"learning_rate": 4.837903521477784e-06,
"loss": 0.0184,
"step": 168
},
{
"epoch": 1.4668587896253602,
"grad_norm": 0.8014233112335205,
"learning_rate": 4.833155058197842e-06,
"loss": 0.0207,
"step": 169
},
{
"epoch": 1.4755043227665707,
"grad_norm": 0.719078779220581,
"learning_rate": 4.828340443630847e-06,
"loss": 0.0184,
"step": 170
},
{
"epoch": 1.484149855907781,
"grad_norm": 0.6776456832885742,
"learning_rate": 4.823459814284205e-06,
"loss": 0.0186,
"step": 171
},
{
"epoch": 1.4927953890489913,
"grad_norm": 0.8054779171943665,
"learning_rate": 4.818513308537025e-06,
"loss": 0.0179,
"step": 172
},
{
"epoch": 1.5014409221902016,
"grad_norm": 0.8131511211395264,
"learning_rate": 4.813501066636188e-06,
"loss": 0.0177,
"step": 173
},
{
"epoch": 1.510086455331412,
"grad_norm": 0.7842754125595093,
"learning_rate": 4.808423230692374e-06,
"loss": 0.0182,
"step": 174
},
{
"epoch": 1.5187319884726225,
"grad_norm": 0.6084232330322266,
"learning_rate": 4.8032799446760326e-06,
"loss": 0.014,
"step": 175
},
{
"epoch": 1.527377521613833,
"grad_norm": 0.7110248804092407,
"learning_rate": 4.798071354413302e-06,
"loss": 0.0164,
"step": 176
},
{
"epoch": 1.5360230547550433,
"grad_norm": 0.707081139087677,
"learning_rate": 4.792797607581872e-06,
"loss": 0.0137,
"step": 177
},
{
"epoch": 1.5446685878962536,
"grad_norm": 0.6509385704994202,
"learning_rate": 4.787458853706798e-06,
"loss": 0.0145,
"step": 178
},
{
"epoch": 1.553314121037464,
"grad_norm": 0.7796662449836731,
"learning_rate": 4.7820552441562625e-06,
"loss": 0.0169,
"step": 179
},
{
"epoch": 1.5619596541786742,
"grad_norm": 0.7351200580596924,
"learning_rate": 4.7765869321372835e-06,
"loss": 0.0152,
"step": 180
},
{
"epoch": 1.5706051873198847,
"grad_norm": 0.9273463487625122,
"learning_rate": 4.771054072691367e-06,
"loss": 0.0181,
"step": 181
},
{
"epoch": 1.579250720461095,
"grad_norm": 0.7243021130561829,
"learning_rate": 4.7654568226901165e-06,
"loss": 0.0148,
"step": 182
},
{
"epoch": 1.5878962536023056,
"grad_norm": 0.729568600654602,
"learning_rate": 4.759795340830782e-06,
"loss": 0.0145,
"step": 183
},
{
"epoch": 1.5965417867435159,
"grad_norm": 0.5814985632896423,
"learning_rate": 4.754069787631761e-06,
"loss": 0.0128,
"step": 184
},
{
"epoch": 1.6051873198847262,
"grad_norm": 0.49074554443359375,
"learning_rate": 4.7482803254280485e-06,
"loss": 0.0125,
"step": 185
},
{
"epoch": 1.6138328530259365,
"grad_norm": 0.5640458464622498,
"learning_rate": 4.742427118366632e-06,
"loss": 0.0155,
"step": 186
},
{
"epoch": 1.622478386167147,
"grad_norm": 0.574414849281311,
"learning_rate": 4.736510332401841e-06,
"loss": 0.0164,
"step": 187
},
{
"epoch": 1.6311239193083573,
"grad_norm": 0.5837032198905945,
"learning_rate": 4.730530135290638e-06,
"loss": 0.0129,
"step": 188
},
{
"epoch": 1.6397694524495678,
"grad_norm": 0.5181459188461304,
"learning_rate": 4.724486696587862e-06,
"loss": 0.016,
"step": 189
},
{
"epoch": 1.6484149855907781,
"grad_norm": 0.990842342376709,
"learning_rate": 4.718380187641429e-06,
"loss": 0.0173,
"step": 190
},
{
"epoch": 1.6570605187319885,
"grad_norm": 0.6795041561126709,
"learning_rate": 4.712210781587463e-06,
"loss": 0.013,
"step": 191
},
{
"epoch": 1.6657060518731988,
"grad_norm": 0.7275810837745667,
"learning_rate": 4.705978653345392e-06,
"loss": 0.0142,
"step": 192
},
{
"epoch": 1.674351585014409,
"grad_norm": 0.575529932975769,
"learning_rate": 4.699683979612991e-06,
"loss": 0.0128,
"step": 193
},
{
"epoch": 1.6829971181556196,
"grad_norm": 0.8860746622085571,
"learning_rate": 4.693326938861367e-06,
"loss": 0.0115,
"step": 194
},
{
"epoch": 1.6916426512968301,
"grad_norm": 0.985464870929718,
"learning_rate": 4.686907711329903e-06,
"loss": 0.0146,
"step": 195
},
{
"epoch": 1.7002881844380404,
"grad_norm": 0.5690702199935913,
"learning_rate": 4.680426479021147e-06,
"loss": 0.0133,
"step": 196
},
{
"epoch": 1.7089337175792507,
"grad_norm": 0.536115825176239,
"learning_rate": 4.67388342569565e-06,
"loss": 0.0082,
"step": 197
},
{
"epoch": 1.717579250720461,
"grad_norm": 0.9312114119529724,
"learning_rate": 4.667278736866755e-06,
"loss": 0.0127,
"step": 198
},
{
"epoch": 1.7262247838616713,
"grad_norm": 0.8902615904808044,
"learning_rate": 4.660612599795343e-06,
"loss": 0.0121,
"step": 199
},
{
"epoch": 1.7348703170028819,
"grad_norm": 0.8274016380310059,
"learning_rate": 4.653885203484516e-06,
"loss": 0.0142,
"step": 200
},
{
"epoch": 1.7435158501440924,
"grad_norm": 1.00035560131073,
"learning_rate": 4.647096738674243e-06,
"loss": 0.0104,
"step": 201
},
{
"epoch": 1.7521613832853027,
"grad_norm": 0.5850755572319031,
"learning_rate": 4.640247397835953e-06,
"loss": 0.0083,
"step": 202
},
{
"epoch": 1.760806916426513,
"grad_norm": 0.588136613368988,
"learning_rate": 4.633337375167074e-06,
"loss": 0.0108,
"step": 203
},
{
"epoch": 1.7694524495677233,
"grad_norm": 0.6305558681488037,
"learning_rate": 4.626366866585528e-06,
"loss": 0.0103,
"step": 204
},
{
"epoch": 1.7780979827089336,
"grad_norm": 0.46658825874328613,
"learning_rate": 4.619336069724177e-06,
"loss": 0.0103,
"step": 205
},
{
"epoch": 1.7867435158501441,
"grad_norm": 2.3057351112365723,
"learning_rate": 4.612245183925225e-06,
"loss": 0.0123,
"step": 206
},
{
"epoch": 1.7953890489913544,
"grad_norm": 0.6322522163391113,
"learning_rate": 4.605094410234551e-06,
"loss": 0.0123,
"step": 207
},
{
"epoch": 1.804034582132565,
"grad_norm": 1.1089496612548828,
"learning_rate": 4.597883951396027e-06,
"loss": 0.0149,
"step": 208
},
{
"epoch": 1.8126801152737753,
"grad_norm": 0.7867231965065002,
"learning_rate": 4.590614011845758e-06,
"loss": 0.01,
"step": 209
},
{
"epoch": 1.8213256484149856,
"grad_norm": 0.8597843647003174,
"learning_rate": 4.583284797706288e-06,
"loss": 0.0121,
"step": 210
},
{
"epoch": 1.8299711815561959,
"grad_norm": 0.6839237809181213,
"learning_rate": 4.575896516780757e-06,
"loss": 0.0108,
"step": 211
},
{
"epoch": 1.8386167146974062,
"grad_norm": 0.9036144614219666,
"learning_rate": 4.568449378547011e-06,
"loss": 0.009,
"step": 212
},
{
"epoch": 1.8472622478386167,
"grad_norm": 0.439732164144516,
"learning_rate": 4.560943594151657e-06,
"loss": 0.0097,
"step": 213
},
{
"epoch": 1.8559077809798272,
"grad_norm": 0.5021887421607971,
"learning_rate": 4.553379376404085e-06,
"loss": 0.0087,
"step": 214
},
{
"epoch": 1.8645533141210375,
"grad_norm": 0.36055275797843933,
"learning_rate": 4.5457569397704226e-06,
"loss": 0.0079,
"step": 215
},
{
"epoch": 1.8731988472622478,
"grad_norm": 0.5071420669555664,
"learning_rate": 4.538076500367469e-06,
"loss": 0.0065,
"step": 216
},
{
"epoch": 1.8818443804034581,
"grad_norm": 0.5510254502296448,
"learning_rate": 4.530338275956553e-06,
"loss": 0.0099,
"step": 217
},
{
"epoch": 1.8904899135446684,
"grad_norm": 0.8489411473274231,
"learning_rate": 4.522542485937369e-06,
"loss": 0.0068,
"step": 218
},
{
"epoch": 1.899135446685879,
"grad_norm": 0.48011505603790283,
"learning_rate": 4.514689351341751e-06,
"loss": 0.007,
"step": 219
},
{
"epoch": 1.9077809798270895,
"grad_norm": 0.7145248055458069,
"learning_rate": 4.506779094827409e-06,
"loss": 0.0085,
"step": 220
},
{
"epoch": 1.9164265129682998,
"grad_norm": 0.7936932444572449,
"learning_rate": 4.498811940671615e-06,
"loss": 0.0086,
"step": 221
},
{
"epoch": 1.92507204610951,
"grad_norm": 0.7309126257896423,
"learning_rate": 4.49078811476484e-06,
"loss": 0.0093,
"step": 222
},
{
"epoch": 1.9337175792507204,
"grad_norm": 0.6176413893699646,
"learning_rate": 4.482707844604359e-06,
"loss": 0.0095,
"step": 223
},
{
"epoch": 1.9423631123919307,
"grad_norm": 0.995867133140564,
"learning_rate": 4.474571359287791e-06,
"loss": 0.0061,
"step": 224
},
{
"epoch": 1.9510086455331412,
"grad_norm": 0.47976699471473694,
"learning_rate": 4.466378889506607e-06,
"loss": 0.0054,
"step": 225
},
{
"epoch": 1.9596541786743515,
"grad_norm": 0.740679144859314,
"learning_rate": 4.458130667539592e-06,
"loss": 0.0097,
"step": 226
},
{
"epoch": 1.968299711815562,
"grad_norm": 0.565301775932312,
"learning_rate": 4.449826927246257e-06,
"loss": 0.008,
"step": 227
},
{
"epoch": 1.9769452449567724,
"grad_norm": 0.6384701728820801,
"learning_rate": 4.441467904060207e-06,
"loss": 0.0053,
"step": 228
},
{
"epoch": 1.9855907780979827,
"grad_norm": 0.8729382157325745,
"learning_rate": 4.4330538349824684e-06,
"loss": 0.0092,
"step": 229
},
{
"epoch": 1.994236311239193,
"grad_norm": 0.6458576321601868,
"learning_rate": 4.424584958574766e-06,
"loss": 0.0082,
"step": 230
},
{
"epoch": 2.0086455331412103,
"grad_norm": 2.1104297637939453,
"learning_rate": 4.4160615149527646e-06,
"loss": 0.0172,
"step": 231
},
{
"epoch": 2.0172910662824206,
"grad_norm": 0.8500051498413086,
"learning_rate": 4.407483745779256e-06,
"loss": 0.0075,
"step": 232
},
{
"epoch": 2.025936599423631,
"grad_norm": 0.8504526615142822,
"learning_rate": 4.39885189425731e-06,
"loss": 0.0063,
"step": 233
},
{
"epoch": 2.0345821325648417,
"grad_norm": 0.5642857551574707,
"learning_rate": 4.3901662051233755e-06,
"loss": 0.0046,
"step": 234
},
{
"epoch": 2.043227665706052,
"grad_norm": 1.6137267351150513,
"learning_rate": 4.381426924640346e-06,
"loss": 0.0111,
"step": 235
},
{
"epoch": 2.0518731988472623,
"grad_norm": 0.6515895128250122,
"learning_rate": 4.372634300590578e-06,
"loss": 0.0054,
"step": 236
},
{
"epoch": 2.0605187319884726,
"grad_norm": 0.42837992310523987,
"learning_rate": 4.363788582268857e-06,
"loss": 0.0064,
"step": 237
},
{
"epoch": 2.069164265129683,
"grad_norm": 0.6755346059799194,
"learning_rate": 4.35489002047534e-06,
"loss": 0.0066,
"step": 238
},
{
"epoch": 2.077809798270893,
"grad_norm": 0.5027966499328613,
"learning_rate": 4.345938867508439e-06,
"loss": 0.0053,
"step": 239
},
{
"epoch": 2.086455331412104,
"grad_norm": 0.44720327854156494,
"learning_rate": 4.336935377157668e-06,
"loss": 0.0053,
"step": 240
},
{
"epoch": 2.0951008645533142,
"grad_norm": 0.9336118102073669,
"learning_rate": 4.32787980469645e-06,
"loss": 0.0071,
"step": 241
},
{
"epoch": 2.1037463976945245,
"grad_norm": 0.6465862393379211,
"learning_rate": 4.318772406874873e-06,
"loss": 0.004,
"step": 242
},
{
"epoch": 2.112391930835735,
"grad_norm": 0.4339396357536316,
"learning_rate": 4.309613441912421e-06,
"loss": 0.0062,
"step": 243
},
{
"epoch": 2.121037463976945,
"grad_norm": 0.805067777633667,
"learning_rate": 4.30040316949064e-06,
"loss": 0.0052,
"step": 244
},
{
"epoch": 2.1296829971181555,
"grad_norm": 0.5986116528511047,
"learning_rate": 4.291141850745788e-06,
"loss": 0.004,
"step": 245
},
{
"epoch": 2.138328530259366,
"grad_norm": 0.33404719829559326,
"learning_rate": 4.281829748261422e-06,
"loss": 0.004,
"step": 246
},
{
"epoch": 2.1469740634005765,
"grad_norm": 0.3907548189163208,
"learning_rate": 4.272467126060954e-06,
"loss": 0.0057,
"step": 247
},
{
"epoch": 2.155619596541787,
"grad_norm": 0.4643802046775818,
"learning_rate": 4.263054249600172e-06,
"loss": 0.0045,
"step": 248
},
{
"epoch": 2.164265129682997,
"grad_norm": 0.46979233622550964,
"learning_rate": 4.253591385759705e-06,
"loss": 0.0045,
"step": 249
},
{
"epoch": 2.1729106628242074,
"grad_norm": 0.4389543831348419,
"learning_rate": 4.244078802837462e-06,
"loss": 0.005,
"step": 250
},
{
"epoch": 2.1815561959654177,
"grad_norm": 0.33877941966056824,
"learning_rate": 4.234516770541023e-06,
"loss": 0.0036,
"step": 251
},
{
"epoch": 2.1902017291066285,
"grad_norm": 0.41794854402542114,
"learning_rate": 4.224905559979991e-06,
"loss": 0.0049,
"step": 252
},
{
"epoch": 2.1988472622478388,
"grad_norm": 0.471942275762558,
"learning_rate": 4.215245443658307e-06,
"loss": 0.004,
"step": 253
},
{
"epoch": 2.207492795389049,
"grad_norm": 0.5759933590888977,
"learning_rate": 4.205536695466524e-06,
"loss": 0.0037,
"step": 254
},
{
"epoch": 2.2161383285302594,
"grad_norm": 0.4565725326538086,
"learning_rate": 4.1957795906740404e-06,
"loss": 0.0041,
"step": 255
},
{
"epoch": 2.2247838616714697,
"grad_norm": 0.5055815577507019,
"learning_rate": 4.1859744059212945e-06,
"loss": 0.0058,
"step": 256
},
{
"epoch": 2.23342939481268,
"grad_norm": 0.5749806761741638,
"learning_rate": 4.176121419211924e-06,
"loss": 0.0061,
"step": 257
},
{
"epoch": 2.2420749279538903,
"grad_norm": 1.0351425409317017,
"learning_rate": 4.16622090990488e-06,
"loss": 0.0046,
"step": 258
},
{
"epoch": 2.250720461095101,
"grad_norm": 1.2486116886138916,
"learning_rate": 4.15627315870651e-06,
"loss": 0.0065,
"step": 259
},
{
"epoch": 2.2593659942363113,
"grad_norm": 0.6449837684631348,
"learning_rate": 4.146278447662597e-06,
"loss": 0.0055,
"step": 260
},
{
"epoch": 2.2680115273775217,
"grad_norm": 0.7886547446250916,
"learning_rate": 4.136237060150363e-06,
"loss": 0.0083,
"step": 261
},
{
"epoch": 2.276657060518732,
"grad_norm": 0.797860324382782,
"learning_rate": 4.126149280870434e-06,
"loss": 0.0043,
"step": 262
},
{
"epoch": 2.2853025936599423,
"grad_norm": 0.8402999639511108,
"learning_rate": 4.116015395838772e-06,
"loss": 0.0044,
"step": 263
},
{
"epoch": 2.2939481268011526,
"grad_norm": 0.4193873107433319,
"learning_rate": 4.105835692378557e-06,
"loss": 0.0028,
"step": 264
},
{
"epoch": 2.302593659942363,
"grad_norm": 0.4523886740207672,
"learning_rate": 4.095610459112051e-06,
"loss": 0.005,
"step": 265
},
{
"epoch": 2.3112391930835736,
"grad_norm": 0.45222657918930054,
"learning_rate": 4.0853399859524066e-06,
"loss": 0.0027,
"step": 266
},
{
"epoch": 2.319884726224784,
"grad_norm": 0.5312837362289429,
"learning_rate": 4.075024564095452e-06,
"loss": 0.0072,
"step": 267
},
{
"epoch": 2.3285302593659942,
"grad_norm": 0.5837632417678833,
"learning_rate": 4.064664486011433e-06,
"loss": 0.0056,
"step": 268
},
{
"epoch": 2.3371757925072045,
"grad_norm": 0.44099336862564087,
"learning_rate": 4.05426004543672e-06,
"loss": 0.0052,
"step": 269
},
{
"epoch": 2.345821325648415,
"grad_norm": 0.4609047472476959,
"learning_rate": 4.04381153736548e-06,
"loss": 0.0041,
"step": 270
},
{
"epoch": 2.3544668587896256,
"grad_norm": 0.4224575161933899,
"learning_rate": 4.033319258041316e-06,
"loss": 0.0056,
"step": 271
},
{
"epoch": 2.363112391930836,
"grad_norm": 0.45308536291122437,
"learning_rate": 4.022783504948862e-06,
"loss": 0.0037,
"step": 272
},
{
"epoch": 2.371757925072046,
"grad_norm": 0.29924383759498596,
"learning_rate": 4.012204576805352e-06,
"loss": 0.0037,
"step": 273
},
{
"epoch": 2.3804034582132565,
"grad_norm": 0.33644095063209534,
"learning_rate": 4.001582773552153e-06,
"loss": 0.003,
"step": 274
},
{
"epoch": 2.389048991354467,
"grad_norm": 0.553075909614563,
"learning_rate": 3.990918396346254e-06,
"loss": 0.0024,
"step": 275
},
{
"epoch": 2.397694524495677,
"grad_norm": 0.7815229892730713,
"learning_rate": 3.9802117475517335e-06,
"loss": 0.0024,
"step": 276
},
{
"epoch": 2.4063400576368874,
"grad_norm": 0.512950599193573,
"learning_rate": 3.969463130731183e-06,
"loss": 0.0037,
"step": 277
},
{
"epoch": 2.414985590778098,
"grad_norm": 0.45993947982788086,
"learning_rate": 3.958672850637104e-06,
"loss": 0.0029,
"step": 278
},
{
"epoch": 2.4236311239193085,
"grad_norm": 0.9428088068962097,
"learning_rate": 3.947841213203262e-06,
"loss": 0.0029,
"step": 279
},
{
"epoch": 2.4322766570605188,
"grad_norm": 0.4953707158565521,
"learning_rate": 3.936968525536018e-06,
"loss": 0.0029,
"step": 280
},
{
"epoch": 2.440922190201729,
"grad_norm": 0.3896157443523407,
"learning_rate": 3.926055095905616e-06,
"loss": 0.003,
"step": 281
},
{
"epoch": 2.4495677233429394,
"grad_norm": 0.5497294664382935,
"learning_rate": 3.9151012337374495e-06,
"loss": 0.004,
"step": 282
},
{
"epoch": 2.4582132564841497,
"grad_norm": 0.31193655729293823,
"learning_rate": 3.9041072496032805e-06,
"loss": 0.0025,
"step": 283
},
{
"epoch": 2.46685878962536,
"grad_norm": 0.6194396615028381,
"learning_rate": 3.893073455212438e-06,
"loss": 0.0035,
"step": 284
},
{
"epoch": 2.4755043227665707,
"grad_norm": 0.5500622987747192,
"learning_rate": 3.882000163402984e-06,
"loss": 0.0021,
"step": 285
},
{
"epoch": 2.484149855907781,
"grad_norm": 0.3981945812702179,
"learning_rate": 3.870887688132834e-06,
"loss": 0.0046,
"step": 286
},
{
"epoch": 2.4927953890489913,
"grad_norm": 0.6703558564186096,
"learning_rate": 3.859736344470866e-06,
"loss": 0.0032,
"step": 287
},
{
"epoch": 2.5014409221902016,
"grad_norm": 0.6285901665687561,
"learning_rate": 3.8485464485879785e-06,
"loss": 0.0055,
"step": 288
},
{
"epoch": 2.510086455331412,
"grad_norm": 0.45444604754447937,
"learning_rate": 3.837318317748134e-06,
"loss": 0.0029,
"step": 289
},
{
"epoch": 2.5187319884726227,
"grad_norm": 0.5909140706062317,
"learning_rate": 3.826052270299356e-06,
"loss": 0.0018,
"step": 290
},
{
"epoch": 2.527377521613833,
"grad_norm": 0.28365251421928406,
"learning_rate": 3.814748625664711e-06,
"loss": 0.0016,
"step": 291
},
{
"epoch": 2.5360230547550433,
"grad_norm": 0.6033309102058411,
"learning_rate": 3.8034077043332463e-06,
"loss": 0.0026,
"step": 292
},
{
"epoch": 2.5446685878962536,
"grad_norm": 0.5435200929641724,
"learning_rate": 3.7920298278509028e-06,
"loss": 0.003,
"step": 293
},
{
"epoch": 2.553314121037464,
"grad_norm": 0.4895404875278473,
"learning_rate": 3.7806153188114027e-06,
"loss": 0.0028,
"step": 294
},
{
"epoch": 2.561959654178674,
"grad_norm": 0.37084832787513733,
"learning_rate": 3.7691645008471e-06,
"loss": 0.0019,
"step": 295
},
{
"epoch": 2.5706051873198845,
"grad_norm": 0.35942429304122925,
"learning_rate": 3.7576776986198064e-06,
"loss": 0.004,
"step": 296
},
{
"epoch": 2.5792507204610953,
"grad_norm": 0.9116867780685425,
"learning_rate": 3.7461552378115833e-06,
"loss": 0.0032,
"step": 297
},
{
"epoch": 2.5878962536023056,
"grad_norm": 0.4446312487125397,
"learning_rate": 3.734597445115511e-06,
"loss": 0.0016,
"step": 298
},
{
"epoch": 2.596541786743516,
"grad_norm": 0.2611502408981323,
"learning_rate": 3.7230046482264256e-06,
"loss": 0.0031,
"step": 299
},
{
"epoch": 2.605187319884726,
"grad_norm": 0.5151038765907288,
"learning_rate": 3.711377175831626e-06,
"loss": 0.0028,
"step": 300
},
{
"epoch": 2.6138328530259365,
"grad_norm": 0.3680933713912964,
"learning_rate": 3.6997153576015552e-06,
"loss": 0.0008,
"step": 301
},
{
"epoch": 2.6224783861671472,
"grad_norm": 0.42117324471473694,
"learning_rate": 3.6880195241804567e-06,
"loss": 0.0037,
"step": 302
},
{
"epoch": 2.631123919308357,
"grad_norm": 0.35740363597869873,
"learning_rate": 3.676290007176994e-06,
"loss": 0.0015,
"step": 303
},
{
"epoch": 2.639769452449568,
"grad_norm": 0.5276333093643188,
"learning_rate": 3.6645271391548542e-06,
"loss": 0.0043,
"step": 304
},
{
"epoch": 2.648414985590778,
"grad_norm": 0.6532303094863892,
"learning_rate": 3.652731253623315e-06,
"loss": 0.0023,
"step": 305
},
{
"epoch": 2.6570605187319885,
"grad_norm": 0.2517545521259308,
"learning_rate": 3.6409026850277908e-06,
"loss": 0.001,
"step": 306
},
{
"epoch": 2.6657060518731988,
"grad_norm": 0.4791451096534729,
"learning_rate": 3.6290417687403485e-06,
"loss": 0.0025,
"step": 307
},
{
"epoch": 2.674351585014409,
"grad_norm": 0.665262758731842,
"learning_rate": 3.617148841050202e-06,
"loss": 0.0022,
"step": 308
},
{
"epoch": 2.68299711815562,
"grad_norm": 0.40816545486450195,
"learning_rate": 3.6052242391541746e-06,
"loss": 0.0033,
"step": 309
},
{
"epoch": 2.69164265129683,
"grad_norm": 0.3500838577747345,
"learning_rate": 3.593268301147139e-06,
"loss": 0.0024,
"step": 310
},
{
"epoch": 2.7002881844380404,
"grad_norm": 0.8193225264549255,
"learning_rate": 3.5812813660124313e-06,
"loss": 0.0031,
"step": 311
},
{
"epoch": 2.7089337175792507,
"grad_norm": 0.3912574350833893,
"learning_rate": 3.5692637736122427e-06,
"loss": 0.0016,
"step": 312
},
{
"epoch": 2.717579250720461,
"grad_norm": 0.46311694383621216,
"learning_rate": 3.5572158646779787e-06,
"loss": 0.002,
"step": 313
},
{
"epoch": 2.7262247838616713,
"grad_norm": 0.5172969698905945,
"learning_rate": 3.5451379808006014e-06,
"loss": 0.0019,
"step": 314
},
{
"epoch": 2.7348703170028816,
"grad_norm": 0.2887652814388275,
"learning_rate": 3.5330304644209456e-06,
"loss": 0.0023,
"step": 315
},
{
"epoch": 2.7435158501440924,
"grad_norm": 0.6363840699195862,
"learning_rate": 3.520893658820007e-06,
"loss": 0.0047,
"step": 316
},
{
"epoch": 2.7521613832853027,
"grad_norm": 0.8680986762046814,
"learning_rate": 3.50872790810921e-06,
"loss": 0.0017,
"step": 317
},
{
"epoch": 2.760806916426513,
"grad_norm": 0.2707628607749939,
"learning_rate": 3.4965335572206516e-06,
"loss": 0.0019,
"step": 318
},
{
"epoch": 2.7694524495677233,
"grad_norm": 0.5891854166984558,
"learning_rate": 3.484310951897323e-06,
"loss": 0.0025,
"step": 319
},
{
"epoch": 2.7780979827089336,
"grad_norm": 0.3361755609512329,
"learning_rate": 3.4720604386833024e-06,
"loss": 0.0016,
"step": 320
},
{
"epoch": 2.7867435158501443,
"grad_norm": 0.2326732724905014,
"learning_rate": 3.459782364913935e-06,
"loss": 0.0049,
"step": 321
},
{
"epoch": 2.795389048991354,
"grad_norm": 0.7111459970474243,
"learning_rate": 3.447477078705983e-06,
"loss": 0.0023,
"step": 322
},
{
"epoch": 2.804034582132565,
"grad_norm": 0.5887550711631775,
"learning_rate": 3.4351449289477543e-06,
"loss": 0.0029,
"step": 323
},
{
"epoch": 2.8126801152737753,
"grad_norm": 0.6490927934646606,
"learning_rate": 3.4227862652892106e-06,
"loss": 0.0017,
"step": 324
},
{
"epoch": 2.8213256484149856,
"grad_norm": 0.2970047891139984,
"learning_rate": 3.410401438132056e-06,
"loss": 0.0031,
"step": 325
},
{
"epoch": 2.829971181556196,
"grad_norm": 0.9381114840507507,
"learning_rate": 3.3979907986197996e-06,
"loss": 0.0034,
"step": 326
},
{
"epoch": 2.838616714697406,
"grad_norm": 0.34777429699897766,
"learning_rate": 3.385554698627803e-06,
"loss": 0.0029,
"step": 327
},
{
"epoch": 2.847262247838617,
"grad_norm": 0.5431174635887146,
"learning_rate": 3.3730934907532997e-06,
"loss": 0.0012,
"step": 328
},
{
"epoch": 2.8559077809798272,
"grad_norm": 0.22815977036952972,
"learning_rate": 3.3606075283054005e-06,
"loss": 0.0011,
"step": 329
},
{
"epoch": 2.8645533141210375,
"grad_norm": 0.2716272473335266,
"learning_rate": 3.3480971652950757e-06,
"loss": 0.0015,
"step": 330
},
{
"epoch": 2.873198847262248,
"grad_norm": 0.42019474506378174,
"learning_rate": 3.3355627564251185e-06,
"loss": 0.0026,
"step": 331
},
{
"epoch": 2.881844380403458,
"grad_norm": 0.41707322001457214,
"learning_rate": 3.3230046570800866e-06,
"loss": 0.0017,
"step": 332
},
{
"epoch": 2.8904899135446684,
"grad_norm": 0.23598358035087585,
"learning_rate": 3.3104232233162272e-06,
"loss": 0.0008,
"step": 333
},
{
"epoch": 2.8991354466858787,
"grad_norm": 0.53864985704422,
"learning_rate": 3.2978188118513814e-06,
"loss": 0.0017,
"step": 334
},
{
"epoch": 2.9077809798270895,
"grad_norm": 0.2139906883239746,
"learning_rate": 3.2851917800548726e-06,
"loss": 0.0025,
"step": 335
},
{
"epoch": 2.9164265129683,
"grad_norm": 0.49649330973625183,
"learning_rate": 3.272542485937369e-06,
"loss": 0.0016,
"step": 336
},
{
"epoch": 2.92507204610951,
"grad_norm": 0.1875542253255844,
"learning_rate": 3.259871288140738e-06,
"loss": 0.0015,
"step": 337
},
{
"epoch": 2.9337175792507204,
"grad_norm": 0.4745478928089142,
"learning_rate": 3.247178545927876e-06,
"loss": 0.0023,
"step": 338
},
{
"epoch": 2.9423631123919307,
"grad_norm": 0.5308757424354553,
"learning_rate": 3.234464619172522e-06,
"loss": 0.0026,
"step": 339
},
{
"epoch": 2.9510086455331415,
"grad_norm": 0.3783188760280609,
"learning_rate": 3.221729868349053e-06,
"loss": 0.0007,
"step": 340
},
{
"epoch": 2.9596541786743513,
"grad_norm": 0.2250707745552063,
"learning_rate": 3.208974654522266e-06,
"loss": 0.0006,
"step": 341
},
{
"epoch": 2.968299711815562,
"grad_norm": 0.2039024382829666,
"learning_rate": 3.1961993393371405e-06,
"loss": 0.001,
"step": 342
},
{
"epoch": 2.9769452449567724,
"grad_norm": 0.14253279566764832,
"learning_rate": 3.183404285008582e-06,
"loss": 0.0041,
"step": 343
},
{
"epoch": 2.9855907780979827,
"grad_norm": 0.45035484433174133,
"learning_rate": 3.1705898543111576e-06,
"loss": 0.0007,
"step": 344
},
{
"epoch": 2.994236311239193,
"grad_norm": 0.11087123304605484,
"learning_rate": 3.157756410568803e-06,
"loss": 0.0016,
"step": 345
},
{
"epoch": 3.0086455331412103,
"grad_norm": 0.2526646554470062,
"learning_rate": 3.14490431764453e-06,
"loss": 0.0041,
"step": 346
},
{
"epoch": 3.0172910662824206,
"grad_norm": 0.4000411927700043,
"learning_rate": 3.132033939930101e-06,
"loss": 0.0015,
"step": 347
},
{
"epoch": 3.025936599423631,
"grad_norm": 0.30819934606552124,
"learning_rate": 3.1191456423357047e-06,
"loss": 0.001,
"step": 348
},
{
"epoch": 3.0345821325648417,
"grad_norm": 0.17910632491111755,
"learning_rate": 3.106239790279606e-06,
"loss": 0.0009,
"step": 349
},
{
"epoch": 3.043227665706052,
"grad_norm": 0.2142518013715744,
"learning_rate": 3.093316749677788e-06,
"loss": 0.0016,
"step": 350
},
{
"epoch": 3.0518731988472623,
"grad_norm": 0.17407968640327454,
"learning_rate": 3.0803768869335726e-06,
"loss": 0.0012,
"step": 351
},
{
"epoch": 3.0605187319884726,
"grad_norm": 0.420547753572464,
"learning_rate": 3.0674205689272378e-06,
"loss": 0.0012,
"step": 352
},
{
"epoch": 3.069164265129683,
"grad_norm": 0.298476904630661,
"learning_rate": 3.054448163005613e-06,
"loss": 0.0016,
"step": 353
},
{
"epoch": 3.077809798270893,
"grad_norm": 0.34958237409591675,
"learning_rate": 3.041460036971664e-06,
"loss": 0.001,
"step": 354
},
{
"epoch": 3.086455331412104,
"grad_norm": 0.34075653553009033,
"learning_rate": 3.028456559074061e-06,
"loss": 0.0012,
"step": 355
},
{
"epoch": 3.0951008645533142,
"grad_norm": 0.2592026889324188,
"learning_rate": 3.0154380979967456e-06,
"loss": 0.0008,
"step": 356
},
{
"epoch": 3.1037463976945245,
"grad_norm": 0.22481408715248108,
"learning_rate": 3.0024050228484713e-06,
"loss": 0.001,
"step": 357
},
{
"epoch": 3.112391930835735,
"grad_norm": 0.29716426134109497,
"learning_rate": 2.9893577031523403e-06,
"loss": 0.0014,
"step": 358
},
{
"epoch": 3.121037463976945,
"grad_norm": 0.10284000635147095,
"learning_rate": 2.976296508835326e-06,
"loss": 0.0003,
"step": 359
},
{
"epoch": 3.1296829971181555,
"grad_norm": 0.9767228364944458,
"learning_rate": 2.963221810217786e-06,
"loss": 0.0016,
"step": 360
},
{
"epoch": 3.138328530259366,
"grad_norm": 0.08798839896917343,
"learning_rate": 2.9501339780029614e-06,
"loss": 0.0006,
"step": 361
},
{
"epoch": 3.1469740634005765,
"grad_norm": 0.4243233799934387,
"learning_rate": 2.937033383266466e-06,
"loss": 0.0026,
"step": 362
},
{
"epoch": 3.155619596541787,
"grad_norm": 0.06578671187162399,
"learning_rate": 2.923920397445766e-06,
"loss": 0.0004,
"step": 363
},
{
"epoch": 3.164265129682997,
"grad_norm": 0.4194351136684418,
"learning_rate": 2.910795392329649e-06,
"loss": 0.0012,
"step": 364
},
{
"epoch": 3.1729106628242074,
"grad_norm": 0.18414922058582306,
"learning_rate": 2.8976587400476804e-06,
"loss": 0.0007,
"step": 365
},
{
"epoch": 3.1815561959654177,
"grad_norm": 0.3256891071796417,
"learning_rate": 2.884510813059657e-06,
"loss": 0.0009,
"step": 366
},
{
"epoch": 3.1902017291066285,
"grad_norm": 0.48699238896369934,
"learning_rate": 2.871351984145042e-06,
"loss": 0.0011,
"step": 367
},
{
"epoch": 3.1988472622478388,
"grad_norm": 0.12711212038993835,
"learning_rate": 2.8581826263923993e-06,
"loss": 0.0004,
"step": 368
},
{
"epoch": 3.207492795389049,
"grad_norm": 0.2759152054786682,
"learning_rate": 2.8450031131888147e-06,
"loss": 0.0008,
"step": 369
},
{
"epoch": 3.2161383285302594,
"grad_norm": 0.17881423234939575,
"learning_rate": 2.8318138182093053e-06,
"loss": 0.0005,
"step": 370
},
{
"epoch": 3.2247838616714697,
"grad_norm": 0.09530279785394669,
"learning_rate": 2.8186151154062314e-06,
"loss": 0.0005,
"step": 371
},
{
"epoch": 3.23342939481268,
"grad_norm": 0.29938796162605286,
"learning_rate": 2.8054073789986884e-06,
"loss": 0.0007,
"step": 372
},
{
"epoch": 3.2420749279538903,
"grad_norm": 0.137968972325325,
"learning_rate": 2.792190983461902e-06,
"loss": 0.0005,
"step": 373
},
{
"epoch": 3.250720461095101,
"grad_norm": 0.13374479115009308,
"learning_rate": 2.7789663035166035e-06,
"loss": 0.0002,
"step": 374
},
{
"epoch": 3.2593659942363113,
"grad_norm": 0.5121517777442932,
"learning_rate": 2.7657337141184137e-06,
"loss": 0.0007,
"step": 375
},
{
"epoch": 3.2680115273775217,
"grad_norm": 0.34778279066085815,
"learning_rate": 2.7524935904472056e-06,
"loss": 0.0006,
"step": 376
},
{
"epoch": 3.276657060518732,
"grad_norm": 0.2674623131752014,
"learning_rate": 2.73924630789647e-06,
"loss": 0.0005,
"step": 377
},
{
"epoch": 3.2853025936599423,
"grad_norm": 0.20955437421798706,
"learning_rate": 2.7259922420626705e-06,
"loss": 0.0006,
"step": 378
},
{
"epoch": 3.2939481268011526,
"grad_norm": 0.29623010754585266,
"learning_rate": 2.7127317687345973e-06,
"loss": 0.0018,
"step": 379
},
{
"epoch": 3.302593659942363,
"grad_norm": 0.2673618197441101,
"learning_rate": 2.699465263882708e-06,
"loss": 0.0016,
"step": 380
},
{
"epoch": 3.3112391930835736,
"grad_norm": 0.061357282102108,
"learning_rate": 2.686193103648472e-06,
"loss": 0.0004,
"step": 381
},
{
"epoch": 3.319884726224784,
"grad_norm": 0.1466841846704483,
"learning_rate": 2.672915664333704e-06,
"loss": 0.0007,
"step": 382
},
{
"epoch": 3.3285302593659942,
"grad_norm": 0.10404661297798157,
"learning_rate": 2.6596333223898934e-06,
"loss": 0.0003,
"step": 383
},
{
"epoch": 3.3371757925072045,
"grad_norm": 0.1709136813879013,
"learning_rate": 2.6463464544075344e-06,
"loss": 0.0002,
"step": 384
},
{
"epoch": 3.345821325648415,
"grad_norm": 0.08438851684331894,
"learning_rate": 2.6330554371054466e-06,
"loss": 0.0003,
"step": 385
},
{
"epoch": 3.3544668587896256,
"grad_norm": 0.15904630720615387,
"learning_rate": 2.6197606473200924e-06,
"loss": 0.001,
"step": 386
},
{
"epoch": 3.363112391930836,
"grad_norm": 0.1417740285396576,
"learning_rate": 2.6064624619948966e-06,
"loss": 0.0003,
"step": 387
},
{
"epoch": 3.371757925072046,
"grad_norm": 0.8411481380462646,
"learning_rate": 2.593161258169554e-06,
"loss": 0.0016,
"step": 388
},
{
"epoch": 3.3804034582132565,
"grad_norm": 0.13281837105751038,
"learning_rate": 2.579857412969345e-06,
"loss": 0.0006,
"step": 389
},
{
"epoch": 3.389048991354467,
"grad_norm": 0.06628571450710297,
"learning_rate": 2.5665513035944373e-06,
"loss": 0.0003,
"step": 390
},
{
"epoch": 3.397694524495677,
"grad_norm": 0.2239917516708374,
"learning_rate": 2.5532433073091967e-06,
"loss": 0.0005,
"step": 391
},
{
"epoch": 3.4063400576368874,
"grad_norm": 0.1752793937921524,
"learning_rate": 2.539933801431487e-06,
"loss": 0.0005,
"step": 392
},
{
"epoch": 3.414985590778098,
"grad_norm": 0.0841381773352623,
"learning_rate": 2.5266231633219733e-06,
"loss": 0.0004,
"step": 393
},
{
"epoch": 3.4236311239193085,
"grad_norm": 0.0805194228887558,
"learning_rate": 2.513311770373421e-06,
"loss": 0.0002,
"step": 394
},
{
"epoch": 3.4322766570605188,
"grad_norm": 0.07594503462314606,
"learning_rate": 2.5e-06,
"loss": 0.0002,
"step": 395
},
{
"epoch": 3.440922190201729,
"grad_norm": 0.10026411712169647,
"learning_rate": 2.4866882296265797e-06,
"loss": 0.0003,
"step": 396
},
{
"epoch": 3.4495677233429394,
"grad_norm": 0.05829321965575218,
"learning_rate": 2.473376836678028e-06,
"loss": 0.0003,
"step": 397
},
{
"epoch": 3.4582132564841497,
"grad_norm": 0.4893040657043457,
"learning_rate": 2.4600661985685132e-06,
"loss": 0.0014,
"step": 398
},
{
"epoch": 3.46685878962536,
"grad_norm": 0.06461616605520248,
"learning_rate": 2.446756692690804e-06,
"loss": 0.0002,
"step": 399
},
{
"epoch": 3.4755043227665707,
"grad_norm": 0.06487792730331421,
"learning_rate": 2.4334486964055635e-06,
"loss": 0.0002,
"step": 400
},
{
"epoch": 3.484149855907781,
"grad_norm": 0.08846238255500793,
"learning_rate": 2.4201425870306566e-06,
"loss": 0.0005,
"step": 401
},
{
"epoch": 3.4927953890489913,
"grad_norm": 0.5399437546730042,
"learning_rate": 2.406838741830446e-06,
"loss": 0.001,
"step": 402
},
{
"epoch": 3.5014409221902016,
"grad_norm": 0.07543563842773438,
"learning_rate": 2.393537538005104e-06,
"loss": 0.0004,
"step": 403
},
{
"epoch": 3.510086455331412,
"grad_norm": 0.09962673485279083,
"learning_rate": 2.380239352679908e-06,
"loss": 0.0006,
"step": 404
},
{
"epoch": 3.5187319884726227,
"grad_norm": 0.09634942561388016,
"learning_rate": 2.3669445628945543e-06,
"loss": 0.0005,
"step": 405
},
{
"epoch": 3.527377521613833,
"grad_norm": 0.07776723802089691,
"learning_rate": 2.3536535455924656e-06,
"loss": 0.0002,
"step": 406
},
{
"epoch": 3.5360230547550433,
"grad_norm": 0.26890671253204346,
"learning_rate": 2.340366677610107e-06,
"loss": 0.0002,
"step": 407
},
{
"epoch": 3.5446685878962536,
"grad_norm": 0.0821603462100029,
"learning_rate": 2.327084335666297e-06,
"loss": 0.0001,
"step": 408
},
{
"epoch": 3.553314121037464,
"grad_norm": 0.20579983294010162,
"learning_rate": 2.313806896351529e-06,
"loss": 0.0003,
"step": 409
},
{
"epoch": 3.561959654178674,
"grad_norm": 0.024787653237581253,
"learning_rate": 2.300534736117292e-06,
"loss": 0.0001,
"step": 410
},
{
"epoch": 3.5706051873198845,
"grad_norm": 0.04598158970475197,
"learning_rate": 2.2872682312654035e-06,
"loss": 0.0001,
"step": 411
},
{
"epoch": 3.5792507204610953,
"grad_norm": 0.05497601255774498,
"learning_rate": 2.2740077579373303e-06,
"loss": 0.0003,
"step": 412
},
{
"epoch": 3.5878962536023056,
"grad_norm": 0.058321401476860046,
"learning_rate": 2.2607536921035313e-06,
"loss": 0.0002,
"step": 413
},
{
"epoch": 3.596541786743516,
"grad_norm": 0.024839522317051888,
"learning_rate": 2.247506409552795e-06,
"loss": 0.0001,
"step": 414
},
{
"epoch": 3.605187319884726,
"grad_norm": 1.3820059299468994,
"learning_rate": 2.234266285881587e-06,
"loss": 0.0014,
"step": 415
},
{
"epoch": 3.6138328530259365,
"grad_norm": 0.03965742886066437,
"learning_rate": 2.221033696483397e-06,
"loss": 0.0001,
"step": 416
},
{
"epoch": 3.6224783861671472,
"grad_norm": 0.033283866941928864,
"learning_rate": 2.2078090165380992e-06,
"loss": 0.0001,
"step": 417
},
{
"epoch": 3.631123919308357,
"grad_norm": 0.015545632690191269,
"learning_rate": 2.194592621001311e-06,
"loss": 0.0001,
"step": 418
},
{
"epoch": 3.639769452449568,
"grad_norm": 0.03403123468160629,
"learning_rate": 2.1813848845937695e-06,
"loss": 0.0001,
"step": 419
},
{
"epoch": 3.648414985590778,
"grad_norm": 0.04488883912563324,
"learning_rate": 2.1681861817906955e-06,
"loss": 0.0001,
"step": 420
},
{
"epoch": 3.6570605187319885,
"grad_norm": 0.22106269001960754,
"learning_rate": 2.1549968868111866e-06,
"loss": 0.0003,
"step": 421
},
{
"epoch": 3.6657060518731988,
"grad_norm": 0.12951771914958954,
"learning_rate": 2.141817373607601e-06,
"loss": 0.0003,
"step": 422
},
{
"epoch": 3.674351585014409,
"grad_norm": 0.17822900414466858,
"learning_rate": 2.1286480158549583e-06,
"loss": 0.0004,
"step": 423
},
{
"epoch": 3.68299711815562,
"grad_norm": 0.10862100124359131,
"learning_rate": 2.1154891869403436e-06,
"loss": 0.0002,
"step": 424
},
{
"epoch": 3.69164265129683,
"grad_norm": 0.09040237963199615,
"learning_rate": 2.1023412599523204e-06,
"loss": 0.0003,
"step": 425
},
{
"epoch": 3.7002881844380404,
"grad_norm": 0.14486011862754822,
"learning_rate": 2.089204607670352e-06,
"loss": 0.0003,
"step": 426
},
{
"epoch": 3.7089337175792507,
"grad_norm": 0.058098066598176956,
"learning_rate": 2.0760796025542342e-06,
"loss": 0.0002,
"step": 427
},
{
"epoch": 3.717579250720461,
"grad_norm": 0.13733893632888794,
"learning_rate": 2.0629666167335344e-06,
"loss": 0.0002,
"step": 428
},
{
"epoch": 3.7262247838616713,
"grad_norm": 0.12006496638059616,
"learning_rate": 2.0498660219970395e-06,
"loss": 0.0007,
"step": 429
},
{
"epoch": 3.7348703170028816,
"grad_norm": 0.07560478895902634,
"learning_rate": 2.0367781897822147e-06,
"loss": 0.0001,
"step": 430
},
{
"epoch": 3.7435158501440924,
"grad_norm": 0.021039756014943123,
"learning_rate": 2.0237034911646745e-06,
"loss": 0.0001,
"step": 431
},
{
"epoch": 3.7521613832853027,
"grad_norm": 0.010244839824736118,
"learning_rate": 2.0106422968476606e-06,
"loss": 0.0,
"step": 432
},
{
"epoch": 3.760806916426513,
"grad_norm": 0.10909762978553772,
"learning_rate": 1.9975949771515296e-06,
"loss": 0.0001,
"step": 433
},
{
"epoch": 3.7694524495677233,
"grad_norm": 0.05506197735667229,
"learning_rate": 1.9845619020032552e-06,
"loss": 0.0002,
"step": 434
},
{
"epoch": 3.7780979827089336,
"grad_norm": 0.19995303452014923,
"learning_rate": 1.9715434409259393e-06,
"loss": 0.0011,
"step": 435
},
{
"epoch": 3.7867435158501443,
"grad_norm": 0.009247799403965473,
"learning_rate": 1.958539963028337e-06,
"loss": 0.0,
"step": 436
},
{
"epoch": 3.795389048991354,
"grad_norm": 0.015599964186549187,
"learning_rate": 1.9455518369943873e-06,
"loss": 0.0001,
"step": 437
},
{
"epoch": 3.804034582132565,
"grad_norm": 0.15373478829860687,
"learning_rate": 1.9325794310727626e-06,
"loss": 0.0006,
"step": 438
},
{
"epoch": 3.8126801152737753,
"grad_norm": 0.05203329771757126,
"learning_rate": 1.9196231130664282e-06,
"loss": 0.0002,
"step": 439
},
{
"epoch": 3.8213256484149856,
"grad_norm": 0.08206374943256378,
"learning_rate": 1.906683250322213e-06,
"loss": 0.0003,
"step": 440
},
{
"epoch": 3.829971181556196,
"grad_norm": 0.06621237844228745,
"learning_rate": 1.8937602097203945e-06,
"loss": 0.0001,
"step": 441
},
{
"epoch": 3.838616714697406,
"grad_norm": 0.3289477527141571,
"learning_rate": 1.8808543576642966e-06,
"loss": 0.0016,
"step": 442
},
{
"epoch": 3.847262247838617,
"grad_norm": 0.14183855056762695,
"learning_rate": 1.8679660600698996e-06,
"loss": 0.0002,
"step": 443
},
{
"epoch": 3.8559077809798272,
"grad_norm": 0.43348291516304016,
"learning_rate": 1.8550956823554708e-06,
"loss": 0.0004,
"step": 444
},
{
"epoch": 3.8645533141210375,
"grad_norm": 0.3135347068309784,
"learning_rate": 1.8422435894311973e-06,
"loss": 0.0022,
"step": 445
},
{
"epoch": 3.873198847262248,
"grad_norm": 0.04297763854265213,
"learning_rate": 1.8294101456888433e-06,
"loss": 0.0002,
"step": 446
},
{
"epoch": 3.881844380403458,
"grad_norm": 0.03995918110013008,
"learning_rate": 1.8165957149914182e-06,
"loss": 0.0001,
"step": 447
},
{
"epoch": 3.8904899135446684,
"grad_norm": 0.05494571849703789,
"learning_rate": 1.8038006606628599e-06,
"loss": 0.0002,
"step": 448
},
{
"epoch": 3.8991354466858787,
"grad_norm": 0.00816399697214365,
"learning_rate": 1.7910253454777346e-06,
"loss": 0.0,
"step": 449
},
{
"epoch": 3.9077809798270895,
"grad_norm": 0.012747962959110737,
"learning_rate": 1.7782701316509482e-06,
"loss": 0.0001,
"step": 450
},
{
"epoch": 3.9164265129683,
"grad_norm": 0.055127181112766266,
"learning_rate": 1.7655353808274795e-06,
"loss": 0.0001,
"step": 451
},
{
"epoch": 3.92507204610951,
"grad_norm": 0.33472880721092224,
"learning_rate": 1.752821454072124e-06,
"loss": 0.0006,
"step": 452
},
{
"epoch": 3.9337175792507204,
"grad_norm": 0.06558925658464432,
"learning_rate": 1.7401287118592626e-06,
"loss": 0.0002,
"step": 453
},
{
"epoch": 3.9423631123919307,
"grad_norm": 0.06597518920898438,
"learning_rate": 1.7274575140626318e-06,
"loss": 0.0003,
"step": 454
},
{
"epoch": 3.9510086455331415,
"grad_norm": 0.06961897015571594,
"learning_rate": 1.7148082199451288e-06,
"loss": 0.0002,
"step": 455
},
{
"epoch": 3.9596541786743513,
"grad_norm": 0.07268885523080826,
"learning_rate": 1.7021811881486186e-06,
"loss": 0.0004,
"step": 456
},
{
"epoch": 3.968299711815562,
"grad_norm": 0.05247066915035248,
"learning_rate": 1.6895767766837734e-06,
"loss": 0.0001,
"step": 457
},
{
"epoch": 3.9769452449567724,
"grad_norm": 0.12619982659816742,
"learning_rate": 1.6769953429199142e-06,
"loss": 0.0005,
"step": 458
},
{
"epoch": 3.9855907780979827,
"grad_norm": 0.043070387095212936,
"learning_rate": 1.6644372435748823e-06,
"loss": 0.0002,
"step": 459
},
{
"epoch": 3.994236311239193,
"grad_norm": 0.029151560738682747,
"learning_rate": 1.6519028347049242e-06,
"loss": 0.0,
"step": 460
},
{
"epoch": 4.008645533141211,
"grad_norm": 0.25047677755355835,
"learning_rate": 1.6393924716946003e-06,
"loss": 0.0005,
"step": 461
},
{
"epoch": 4.017291066282421,
"grad_norm": 0.03903050720691681,
"learning_rate": 1.626906509246701e-06,
"loss": 0.0001,
"step": 462
},
{
"epoch": 4.025936599423631,
"grad_norm": 0.06342215090990067,
"learning_rate": 1.6144453013721978e-06,
"loss": 0.0002,
"step": 463
},
{
"epoch": 4.034582132564841,
"grad_norm": 0.020777089521288872,
"learning_rate": 1.6020092013802002e-06,
"loss": 0.0,
"step": 464
},
{
"epoch": 4.043227665706052,
"grad_norm": 0.01243128813803196,
"learning_rate": 1.5895985618679447e-06,
"loss": 0.0001,
"step": 465
},
{
"epoch": 4.051873198847262,
"grad_norm": 0.016826143488287926,
"learning_rate": 1.5772137347107902e-06,
"loss": 0.001,
"step": 466
},
{
"epoch": 4.060518731988473,
"grad_norm": 0.25355905294418335,
"learning_rate": 1.5648550710522467e-06,
"loss": 0.0001,
"step": 467
},
{
"epoch": 4.069164265129683,
"grad_norm": 0.0483023077249527,
"learning_rate": 1.5525229212940168e-06,
"loss": 0.0002,
"step": 468
},
{
"epoch": 4.077809798270893,
"grad_norm": 0.04395914822816849,
"learning_rate": 1.5402176350860653e-06,
"loss": 0.0002,
"step": 469
},
{
"epoch": 4.086455331412104,
"grad_norm": 0.026249030604958534,
"learning_rate": 1.5279395613166987e-06,
"loss": 0.0001,
"step": 470
},
{
"epoch": 4.095100864553314,
"grad_norm": 0.019938549026846886,
"learning_rate": 1.5156890481026786e-06,
"loss": 0.0001,
"step": 471
},
{
"epoch": 4.1037463976945245,
"grad_norm": 0.01866944693028927,
"learning_rate": 1.5034664427793486e-06,
"loss": 0.0,
"step": 472
},
{
"epoch": 4.112391930835735,
"grad_norm": 0.013607538305222988,
"learning_rate": 1.4912720918907906e-06,
"loss": 0.0,
"step": 473
},
{
"epoch": 4.121037463976945,
"grad_norm": 0.06787893176078796,
"learning_rate": 1.4791063411799938e-06,
"loss": 0.0003,
"step": 474
},
{
"epoch": 4.129682997118156,
"grad_norm": 0.019246675074100494,
"learning_rate": 1.4669695355790552e-06,
"loss": 0.0,
"step": 475
},
{
"epoch": 4.138328530259366,
"grad_norm": 0.0765075534582138,
"learning_rate": 1.4548620191994e-06,
"loss": 0.0003,
"step": 476
},
{
"epoch": 4.1469740634005765,
"grad_norm": 0.03727831318974495,
"learning_rate": 1.4427841353220224e-06,
"loss": 0.0002,
"step": 477
},
{
"epoch": 4.155619596541786,
"grad_norm": 0.0503949299454689,
"learning_rate": 1.4307362263877584e-06,
"loss": 0.0002,
"step": 478
},
{
"epoch": 4.164265129682997,
"grad_norm": 0.03264373913407326,
"learning_rate": 1.4187186339875697e-06,
"loss": 0.0002,
"step": 479
},
{
"epoch": 4.172910662824208,
"grad_norm": 0.061017073690891266,
"learning_rate": 1.4067316988528618e-06,
"loss": 0.0,
"step": 480
},
{
"epoch": 4.181556195965418,
"grad_norm": 0.06418503820896149,
"learning_rate": 1.3947757608458263e-06,
"loss": 0.0002,
"step": 481
},
{
"epoch": 4.1902017291066285,
"grad_norm": 0.021499959751963615,
"learning_rate": 1.3828511589497977e-06,
"loss": 0.0,
"step": 482
},
{
"epoch": 4.198847262247838,
"grad_norm": 0.008911274373531342,
"learning_rate": 1.3709582312596519e-06,
"loss": 0.0,
"step": 483
},
{
"epoch": 4.207492795389049,
"grad_norm": 0.007865059189498425,
"learning_rate": 1.3590973149722103e-06,
"loss": 0.0,
"step": 484
},
{
"epoch": 4.216138328530259,
"grad_norm": 0.0180173609405756,
"learning_rate": 1.347268746376685e-06,
"loss": 0.0,
"step": 485
},
{
"epoch": 4.22478386167147,
"grad_norm": 0.0172914769500494,
"learning_rate": 1.3354728608451462e-06,
"loss": 0.0,
"step": 486
},
{
"epoch": 4.23342939481268,
"grad_norm": 0.015868855640292168,
"learning_rate": 1.3237099928230066e-06,
"loss": 0.0,
"step": 487
},
{
"epoch": 4.24207492795389,
"grad_norm": 0.02281869202852249,
"learning_rate": 1.3119804758195443e-06,
"loss": 0.0001,
"step": 488
},
{
"epoch": 4.250720461095101,
"grad_norm": 0.06229427456855774,
"learning_rate": 1.300284642398445e-06,
"loss": 0.0001,
"step": 489
},
{
"epoch": 4.259365994236311,
"grad_norm": 0.04283831641077995,
"learning_rate": 1.288622824168375e-06,
"loss": 0.0001,
"step": 490
},
{
"epoch": 4.268011527377522,
"grad_norm": 0.023561948910355568,
"learning_rate": 1.276995351773575e-06,
"loss": 0.0001,
"step": 491
},
{
"epoch": 4.276657060518732,
"grad_norm": 0.04599250480532646,
"learning_rate": 1.26540255488449e-06,
"loss": 0.0001,
"step": 492
},
{
"epoch": 4.285302593659942,
"grad_norm": 0.03455409035086632,
"learning_rate": 1.2538447621884173e-06,
"loss": 0.0001,
"step": 493
},
{
"epoch": 4.293948126801153,
"grad_norm": 0.06476529687643051,
"learning_rate": 1.2423223013801946e-06,
"loss": 0.0002,
"step": 494
},
{
"epoch": 4.302593659942363,
"grad_norm": 0.007448709569871426,
"learning_rate": 1.230835499152901e-06,
"loss": 0.0001,
"step": 495
},
{
"epoch": 4.311239193083574,
"grad_norm": 0.024221738800406456,
"learning_rate": 1.219384681188598e-06,
"loss": 0.0,
"step": 496
},
{
"epoch": 4.3198847262247835,
"grad_norm": 0.013653815723955631,
"learning_rate": 1.207970172149098e-06,
"loss": 0.0,
"step": 497
},
{
"epoch": 4.328530259365994,
"grad_norm": 0.0163737740367651,
"learning_rate": 1.1965922956667536e-06,
"loss": 0.0001,
"step": 498
},
{
"epoch": 4.337175792507205,
"grad_norm": 0.04039165377616882,
"learning_rate": 1.1852513743352886e-06,
"loss": 0.0001,
"step": 499
},
{
"epoch": 4.345821325648415,
"grad_norm": 0.01361683290451765,
"learning_rate": 1.1739477297006442e-06,
"loss": 0.0001,
"step": 500
},
{
"epoch": 4.354466858789626,
"grad_norm": 0.05603167042136192,
"learning_rate": 1.1626816822518662e-06,
"loss": 0.0002,
"step": 501
},
{
"epoch": 4.363112391930835,
"grad_norm": 0.1649046540260315,
"learning_rate": 1.1514535514120217e-06,
"loss": 0.0003,
"step": 502
},
{
"epoch": 4.371757925072046,
"grad_norm": 0.06988149136304855,
"learning_rate": 1.140263655529135e-06,
"loss": 0.0001,
"step": 503
},
{
"epoch": 4.380403458213257,
"grad_norm": 0.01356775127351284,
"learning_rate": 1.1291123118671665e-06,
"loss": 0.0,
"step": 504
},
{
"epoch": 4.389048991354467,
"grad_norm": 0.011344455182552338,
"learning_rate": 1.1179998365970174e-06,
"loss": 0.0001,
"step": 505
},
{
"epoch": 4.3976945244956775,
"grad_norm": 0.015661202371120453,
"learning_rate": 1.1069265447875618e-06,
"loss": 0.0,
"step": 506
},
{
"epoch": 4.406340057636887,
"grad_norm": 0.13560903072357178,
"learning_rate": 1.0958927503967207e-06,
"loss": 0.0006,
"step": 507
},
{
"epoch": 4.414985590778098,
"grad_norm": 0.13171304762363434,
"learning_rate": 1.0848987662625516e-06,
"loss": 0.0,
"step": 508
},
{
"epoch": 4.423631123919308,
"grad_norm": 0.021776214241981506,
"learning_rate": 1.073944904094385e-06,
"loss": 0.0,
"step": 509
},
{
"epoch": 4.432276657060519,
"grad_norm": 0.004369642119854689,
"learning_rate": 1.0630314744639831e-06,
"loss": 0.0,
"step": 510
},
{
"epoch": 4.4409221902017295,
"grad_norm": 0.05059368908405304,
"learning_rate": 1.052158786796739e-06,
"loss": 0.0001,
"step": 511
},
{
"epoch": 4.449567723342939,
"grad_norm": 0.009102565236389637,
"learning_rate": 1.0413271493628964e-06,
"loss": 0.0,
"step": 512
},
{
"epoch": 4.45821325648415,
"grad_norm": 0.013081284239888191,
"learning_rate": 1.0305368692688175e-06,
"loss": 0.0,
"step": 513
},
{
"epoch": 4.46685878962536,
"grad_norm": 0.07242248952388763,
"learning_rate": 1.019788252448267e-06,
"loss": 0.0003,
"step": 514
},
{
"epoch": 4.475504322766571,
"grad_norm": 0.006698797456920147,
"learning_rate": 1.0090816036537462e-06,
"loss": 0.0,
"step": 515
},
{
"epoch": 4.484149855907781,
"grad_norm": 0.02764168381690979,
"learning_rate": 9.984172264478476e-07,
"loss": 0.0001,
"step": 516
},
{
"epoch": 4.492795389048991,
"grad_norm": 0.006561846937984228,
"learning_rate": 9.877954231946485e-07,
"loss": 0.0,
"step": 517
},
{
"epoch": 4.501440922190202,
"grad_norm": 0.02488504722714424,
"learning_rate": 9.772164950511387e-07,
"loss": 0.0001,
"step": 518
},
{
"epoch": 4.510086455331412,
"grad_norm": 0.01242034137248993,
"learning_rate": 9.66680741958685e-07,
"loss": 0.0001,
"step": 519
},
{
"epoch": 4.518731988472623,
"grad_norm": 0.16501882672309875,
"learning_rate": 9.561884626345206e-07,
"loss": 0.0002,
"step": 520
},
{
"epoch": 4.527377521613833,
"grad_norm": 0.0346071794629097,
"learning_rate": 9.457399545632815e-07,
"loss": 0.0001,
"step": 521
},
{
"epoch": 4.536023054755043,
"grad_norm": 0.010261784307658672,
"learning_rate": 9.353355139885672e-07,
"loss": 0.0,
"step": 522
},
{
"epoch": 4.544668587896254,
"grad_norm": 0.017157072201371193,
"learning_rate": 9.249754359045485e-07,
"loss": 0.0004,
"step": 523
},
{
"epoch": 4.553314121037464,
"grad_norm": 0.09536877274513245,
"learning_rate": 9.146600140475945e-07,
"loss": 0.0001,
"step": 524
},
{
"epoch": 4.561959654178675,
"grad_norm": 0.03881097957491875,
"learning_rate": 9.043895408879505e-07,
"loss": 0.0001,
"step": 525
},
{
"epoch": 4.5706051873198845,
"grad_norm": 0.003924703225493431,
"learning_rate": 8.941643076214438e-07,
"loss": 0.0,
"step": 526
},
{
"epoch": 4.579250720461095,
"grad_norm": 0.010769468732178211,
"learning_rate": 8.839846041612296e-07,
"loss": 0.0001,
"step": 527
},
{
"epoch": 4.587896253602305,
"grad_norm": 0.03893817961215973,
"learning_rate": 8.738507191295658e-07,
"loss": 0.0,
"step": 528
},
{
"epoch": 4.596541786743516,
"grad_norm": 0.010444357059895992,
"learning_rate": 8.637629398496378e-07,
"loss": 0.0001,
"step": 529
},
{
"epoch": 4.605187319884726,
"grad_norm": 0.018974248319864273,
"learning_rate": 8.537215523374037e-07,
"loss": 0.0001,
"step": 530
},
{
"epoch": 4.6138328530259365,
"grad_norm": 0.04680433124303818,
"learning_rate": 8.437268412934898e-07,
"loss": 0.0001,
"step": 531
},
{
"epoch": 4.622478386167147,
"grad_norm": 0.02349163219332695,
"learning_rate": 8.337790900951201e-07,
"loss": 0.0,
"step": 532
},
{
"epoch": 4.631123919308357,
"grad_norm": 0.019928090274333954,
"learning_rate": 8.238785807880767e-07,
"loss": 0.0001,
"step": 533
},
{
"epoch": 4.639769452449568,
"grad_norm": 0.009950011037290096,
"learning_rate": 8.140255940787059e-07,
"loss": 0.0001,
"step": 534
},
{
"epoch": 4.648414985590778,
"grad_norm": 0.034434814006090164,
"learning_rate": 8.042204093259598e-07,
"loss": 0.0,
"step": 535
},
{
"epoch": 4.6570605187319885,
"grad_norm": 0.008066104725003242,
"learning_rate": 7.944633045334762e-07,
"loss": 0.0,
"step": 536
},
{
"epoch": 4.665706051873199,
"grad_norm": 0.0360458604991436,
"learning_rate": 7.847545563416936e-07,
"loss": 0.0001,
"step": 537
},
{
"epoch": 4.674351585014409,
"grad_norm": 0.006509549915790558,
"learning_rate": 7.750944400200103e-07,
"loss": 0.0001,
"step": 538
},
{
"epoch": 4.68299711815562,
"grad_norm": 0.11465685069561005,
"learning_rate": 7.654832294589776e-07,
"loss": 0.0,
"step": 539
},
{
"epoch": 4.69164265129683,
"grad_norm": 0.03756583854556084,
"learning_rate": 7.559211971625385e-07,
"loss": 0.0001,
"step": 540
},
{
"epoch": 4.70028818443804,
"grad_norm": 0.01114535704255104,
"learning_rate": 7.46408614240296e-07,
"loss": 0.0,
"step": 541
},
{
"epoch": 4.708933717579251,
"grad_norm": 0.021791907027363777,
"learning_rate": 7.369457503998287e-07,
"loss": 0.0,
"step": 542
},
{
"epoch": 4.717579250720461,
"grad_norm": 0.012248961254954338,
"learning_rate": 7.275328739390466e-07,
"loss": 0.0,
"step": 543
},
{
"epoch": 4.726224783861672,
"grad_norm": 0.006273179780691862,
"learning_rate": 7.181702517385789e-07,
"loss": 0.0,
"step": 544
},
{
"epoch": 4.734870317002882,
"grad_norm": 0.021544231101870537,
"learning_rate": 7.088581492542121e-07,
"loss": 0.0001,
"step": 545
},
{
"epoch": 4.743515850144092,
"grad_norm": 0.016482336446642876,
"learning_rate": 6.995968305093604e-07,
"loss": 0.0001,
"step": 546
},
{
"epoch": 4.752161383285302,
"grad_norm": 0.035197075456380844,
"learning_rate": 6.903865580875796e-07,
"loss": 0.0,
"step": 547
},
{
"epoch": 4.760806916426513,
"grad_norm": 0.03350463882088661,
"learning_rate": 6.81227593125127e-07,
"loss": 0.0001,
"step": 548
},
{
"epoch": 4.769452449567723,
"grad_norm": 0.025157498195767403,
"learning_rate": 6.721201953035511e-07,
"loss": 0.0001,
"step": 549
},
{
"epoch": 4.778097982708934,
"grad_norm": 0.013705157674849033,
"learning_rate": 6.630646228423324e-07,
"loss": 0.0,
"step": 550
},
{
"epoch": 4.786743515850144,
"grad_norm": 0.05199694633483887,
"learning_rate": 6.54061132491561e-07,
"loss": 0.0001,
"step": 551
},
{
"epoch": 4.795389048991354,
"grad_norm": 0.021124713122844696,
"learning_rate": 6.451099795246604e-07,
"loss": 0.0001,
"step": 552
},
{
"epoch": 4.804034582132565,
"grad_norm": 0.011140077374875546,
"learning_rate": 6.362114177311438e-07,
"loss": 0.0,
"step": 553
},
{
"epoch": 4.812680115273775,
"grad_norm": 0.01086009107530117,
"learning_rate": 6.273656994094232e-07,
"loss": 0.0001,
"step": 554
},
{
"epoch": 4.821325648414986,
"grad_norm": 0.029642069712281227,
"learning_rate": 6.185730753596538e-07,
"loss": 0.0,
"step": 555
},
{
"epoch": 4.829971181556196,
"grad_norm": 0.014636816456913948,
"learning_rate": 6.098337948766256e-07,
"loss": 0.0,
"step": 556
},
{
"epoch": 4.838616714697406,
"grad_norm": 0.0111089451238513,
"learning_rate": 6.011481057426916e-07,
"loss": 0.0,
"step": 557
},
{
"epoch": 4.847262247838617,
"grad_norm": 0.008355767466127872,
"learning_rate": 5.925162542207441e-07,
"loss": 0.0001,
"step": 558
},
{
"epoch": 4.855907780979827,
"grad_norm": 0.020910680294036865,
"learning_rate": 5.839384850472359e-07,
"loss": 0.0,
"step": 559
},
{
"epoch": 4.8645533141210375,
"grad_norm": 0.023057645186781883,
"learning_rate": 5.754150414252341e-07,
"loss": 0.0,
"step": 560
},
{
"epoch": 4.873198847262248,
"grad_norm": 0.04212991148233414,
"learning_rate": 5.669461650175326e-07,
"loss": 0.0001,
"step": 561
},
{
"epoch": 4.881844380403458,
"grad_norm": 0.004758734721690416,
"learning_rate": 5.585320959397935e-07,
"loss": 0.0,
"step": 562
},
{
"epoch": 4.890489913544669,
"grad_norm": 0.006756670773029327,
"learning_rate": 5.501730727537436e-07,
"loss": 0.0,
"step": 563
},
{
"epoch": 4.899135446685879,
"grad_norm": 0.014876470901072025,
"learning_rate": 5.418693324604082e-07,
"loss": 0.0,
"step": 564
},
{
"epoch": 4.9077809798270895,
"grad_norm": 0.008136936463415623,
"learning_rate": 5.336211104933939e-07,
"loss": 0.0,
"step": 565
},
{
"epoch": 4.916426512968299,
"grad_norm": 0.006519688293337822,
"learning_rate": 5.254286407122103e-07,
"loss": 0.0,
"step": 566
},
{
"epoch": 4.92507204610951,
"grad_norm": 0.016903480514883995,
"learning_rate": 5.172921553956417e-07,
"loss": 0.0,
"step": 567
},
{
"epoch": 4.93371757925072,
"grad_norm": 0.003450587159022689,
"learning_rate": 5.092118852351599e-07,
"loss": 0.0,
"step": 568
},
{
"epoch": 4.942363112391931,
"grad_norm": 0.028615275397896767,
"learning_rate": 5.01188059328386e-07,
"loss": 0.0001,
"step": 569
},
{
"epoch": 4.9510086455331415,
"grad_norm": 0.011685383506119251,
"learning_rate": 4.932209051725915e-07,
"loss": 0.0,
"step": 570
},
{
"epoch": 4.959654178674351,
"grad_norm": 0.006484777666628361,
"learning_rate": 4.853106486582499e-07,
"loss": 0.0,
"step": 571
},
{
"epoch": 4.968299711815562,
"grad_norm": 0.006807015277445316,
"learning_rate": 4.774575140626317e-07,
"loss": 0.0,
"step": 572
},
{
"epoch": 4.976945244956772,
"grad_norm": 0.005784816108644009,
"learning_rate": 4.6966172404344754e-07,
"loss": 0.0,
"step": 573
},
{
"epoch": 4.985590778097983,
"grad_norm": 0.0060491194017231464,
"learning_rate": 4.619234996325314e-07,
"loss": 0.0001,
"step": 574
},
{
"epoch": 4.994236311239193,
"grad_norm": 0.08785240352153778,
"learning_rate": 4.542430602295775e-07,
"loss": 0.0003,
"step": 575
},
{
"epoch": 5.008645533141211,
"grad_norm": 0.11832298338413239,
"learning_rate": 4.4662062359591584e-07,
"loss": 0.0001,
"step": 576
},
{
"epoch": 5.017291066282421,
"grad_norm": 0.006518571171909571,
"learning_rate": 4.3905640584834293e-07,
"loss": 0.0,
"step": 577
},
{
"epoch": 5.025936599423631,
"grad_norm": 0.008394552394747734,
"learning_rate": 4.3155062145298994e-07,
"loss": 0.0,
"step": 578
},
{
"epoch": 5.034582132564841,
"grad_norm": 0.014321188442409039,
"learning_rate": 4.241034832192434e-07,
"loss": 0.0,
"step": 579
},
{
"epoch": 5.043227665706052,
"grad_norm": 0.01109258271753788,
"learning_rate": 4.167152022937124e-07,
"loss": 0.0,
"step": 580
},
{
"epoch": 5.051873198847262,
"grad_norm": 0.00782827753573656,
"learning_rate": 4.0938598815424226e-07,
"loss": 0.0,
"step": 581
},
{
"epoch": 5.060518731988473,
"grad_norm": 0.006567042786628008,
"learning_rate": 4.0211604860397294e-07,
"loss": 0.0001,
"step": 582
},
{
"epoch": 5.069164265129683,
"grad_norm": 0.032781753689050674,
"learning_rate": 3.9490558976544967e-07,
"loss": 0.0,
"step": 583
},
{
"epoch": 5.077809798270893,
"grad_norm": 0.006290372461080551,
"learning_rate": 3.877548160747768e-07,
"loss": 0.0001,
"step": 584
},
{
"epoch": 5.086455331412104,
"grad_norm": 0.05169885233044624,
"learning_rate": 3.8066393027582276e-07,
"loss": 0.0001,
"step": 585
},
{
"epoch": 5.095100864553314,
"grad_norm": 0.024971339851617813,
"learning_rate": 3.7363313341447326e-07,
"loss": 0.0001,
"step": 586
},
{
"epoch": 5.1037463976945245,
"grad_norm": 0.007651512045413256,
"learning_rate": 3.666626248329272e-07,
"loss": 0.0,
"step": 587
},
{
"epoch": 5.112391930835735,
"grad_norm": 0.014207379892468452,
"learning_rate": 3.5975260216404714e-07,
"loss": 0.0,
"step": 588
},
{
"epoch": 5.121037463976945,
"grad_norm": 0.01968054287135601,
"learning_rate": 3.529032613257574e-07,
"loss": 0.0,
"step": 589
},
{
"epoch": 5.129682997118156,
"grad_norm": 0.015984253957867622,
"learning_rate": 3.4611479651548456e-07,
"loss": 0.0,
"step": 590
},
{
"epoch": 5.138328530259366,
"grad_norm": 0.008436279371380806,
"learning_rate": 3.393874002046577e-07,
"loss": 0.0001,
"step": 591
},
{
"epoch": 5.1469740634005765,
"grad_norm": 0.04552963003516197,
"learning_rate": 3.327212631332452e-07,
"loss": 0.0,
"step": 592
},
{
"epoch": 5.155619596541786,
"grad_norm": 0.015880795195698738,
"learning_rate": 3.2611657430435013e-07,
"loss": 0.0001,
"step": 593
},
{
"epoch": 5.164265129682997,
"grad_norm": 0.03732914850115776,
"learning_rate": 3.195735209788528e-07,
"loss": 0.0,
"step": 594
},
{
"epoch": 5.172910662824208,
"grad_norm": 0.007819831371307373,
"learning_rate": 3.130922886700968e-07,
"loss": 0.0,
"step": 595
},
{
"epoch": 5.181556195965418,
"grad_norm": 0.014321624301373959,
"learning_rate": 3.066730611386337e-07,
"loss": 0.0,
"step": 596
},
{
"epoch": 5.1902017291066285,
"grad_norm": 0.008290286175906658,
"learning_rate": 3.003160203870095e-07,
"loss": 0.0,
"step": 597
},
{
"epoch": 5.198847262247838,
"grad_norm": 0.006545263342559338,
"learning_rate": 2.940213466546085e-07,
"loss": 0.0,
"step": 598
},
{
"epoch": 5.207492795389049,
"grad_norm": 0.006481291726231575,
"learning_rate": 2.8778921841253774e-07,
"loss": 0.0002,
"step": 599
},
{
"epoch": 5.216138328530259,
"grad_norm": 0.07197843492031097,
"learning_rate": 2.8161981235857145e-07,
"loss": 0.0,
"step": 600
},
{
"epoch": 5.22478386167147,
"grad_norm": 0.014453639276325703,
"learning_rate": 2.7551330341213794e-07,
"loss": 0.0,
"step": 601
},
{
"epoch": 5.23342939481268,
"grad_norm": 0.006451854016631842,
"learning_rate": 2.6946986470936354e-07,
"loss": 0.0,
"step": 602
},
{
"epoch": 5.24207492795389,
"grad_norm": 0.009162579663097858,
"learning_rate": 2.634896675981599e-07,
"loss": 0.0001,
"step": 603
},
{
"epoch": 5.250720461095101,
"grad_norm": 0.029183892533183098,
"learning_rate": 2.5757288163336806e-07,
"loss": 0.0001,
"step": 604
},
{
"epoch": 5.259365994236311,
"grad_norm": 0.06650669872760773,
"learning_rate": 2.517196745719522e-07,
"loss": 0.0001,
"step": 605
},
{
"epoch": 5.268011527377522,
"grad_norm": 0.04278871789574623,
"learning_rate": 2.4593021236823916e-07,
"loss": 0.0,
"step": 606
},
{
"epoch": 5.276657060518732,
"grad_norm": 0.005027587525546551,
"learning_rate": 2.4020465916921866e-07,
"loss": 0.0,
"step": 607
},
{
"epoch": 5.285302593659942,
"grad_norm": 0.012285275384783745,
"learning_rate": 2.3454317730988414e-07,
"loss": 0.0,
"step": 608
},
{
"epoch": 5.293948126801153,
"grad_norm": 0.012882730923593044,
"learning_rate": 2.2894592730863336e-07,
"loss": 0.0,
"step": 609
},
{
"epoch": 5.302593659942363,
"grad_norm": 0.026212921366095543,
"learning_rate": 2.2341306786271695e-07,
"loss": 0.0001,
"step": 610
},
{
"epoch": 5.311239193083574,
"grad_norm": 0.007668660953640938,
"learning_rate": 2.1794475584373724e-07,
"loss": 0.0,
"step": 611
},
{
"epoch": 5.3198847262247835,
"grad_norm": 0.007228130474686623,
"learning_rate": 2.1254114629320228e-07,
"loss": 0.0,
"step": 612
},
{
"epoch": 5.328530259365994,
"grad_norm": 0.007553818169981241,
"learning_rate": 2.072023924181285e-07,
"loss": 0.0,
"step": 613
},
{
"epoch": 5.337175792507205,
"grad_norm": 0.028273316100239754,
"learning_rate": 2.019286455866981e-07,
"loss": 0.0001,
"step": 614
},
{
"epoch": 5.345821325648415,
"grad_norm": 0.017403678968548775,
"learning_rate": 1.967200553239676e-07,
"loss": 0.0,
"step": 615
},
{
"epoch": 5.354466858789626,
"grad_norm": 0.05251264199614525,
"learning_rate": 1.9157676930762702e-07,
"loss": 0.0,
"step": 616
},
{
"epoch": 5.363112391930835,
"grad_norm": 0.0052774338982999325,
"learning_rate": 1.8649893336381315e-07,
"loss": 0.0001,
"step": 617
},
{
"epoch": 5.371757925072046,
"grad_norm": 0.050105899572372437,
"learning_rate": 1.8148669146297564e-07,
"loss": 0.0,
"step": 618
},
{
"epoch": 5.380403458213257,
"grad_norm": 0.010968392714858055,
"learning_rate": 1.7654018571579557e-07,
"loss": 0.0,
"step": 619
},
{
"epoch": 5.389048991354467,
"grad_norm": 0.006572465877979994,
"learning_rate": 1.7165955636915395e-07,
"loss": 0.0,
"step": 620
},
{
"epoch": 5.3976945244956775,
"grad_norm": 0.015997696667909622,
"learning_rate": 1.6684494180215837e-07,
"loss": 0.0,
"step": 621
},
{
"epoch": 5.406340057636887,
"grad_norm": 0.01475190743803978,
"learning_rate": 1.620964785222162e-07,
"loss": 0.0,
"step": 622
},
{
"epoch": 5.414985590778098,
"grad_norm": 0.006957387086004019,
"learning_rate": 1.5741430116116812e-07,
"loss": 0.0,
"step": 623
},
{
"epoch": 5.423631123919308,
"grad_norm": 0.027358924970030785,
"learning_rate": 1.5279854247146703e-07,
"loss": 0.0,
"step": 624
},
{
"epoch": 5.432276657060519,
"grad_norm": 0.013768823817372322,
"learning_rate": 1.4824933332241693e-07,
"loss": 0.0,
"step": 625
},
{
"epoch": 5.4409221902017295,
"grad_norm": 0.010743286460638046,
"learning_rate": 1.4376680269646087e-07,
"loss": 0.0,
"step": 626
},
{
"epoch": 5.449567723342939,
"grad_norm": 0.007050006650388241,
"learning_rate": 1.3935107768552558e-07,
"loss": 0.0,
"step": 627
},
{
"epoch": 5.45821325648415,
"grad_norm": 0.012325974181294441,
"learning_rate": 1.3500228348741595e-07,
"loss": 0.0,
"step": 628
},
{
"epoch": 5.46685878962536,
"grad_norm": 0.023326944559812546,
"learning_rate": 1.307205434022671e-07,
"loss": 0.0,
"step": 629
},
{
"epoch": 5.475504322766571,
"grad_norm": 0.018656721338629723,
"learning_rate": 1.265059788290468e-07,
"loss": 0.0,
"step": 630
},
{
"epoch": 5.484149855907781,
"grad_norm": 0.017813757061958313,
"learning_rate": 1.223587092621162e-07,
"loss": 0.0,
"step": 631
},
{
"epoch": 5.492795389048991,
"grad_norm": 0.014615735039114952,
"learning_rate": 1.1827885228783865e-07,
"loss": 0.0,
"step": 632
},
{
"epoch": 5.501440922190202,
"grad_norm": 0.010695324279367924,
"learning_rate": 1.1426652358124817e-07,
"loss": 0.0,
"step": 633
},
{
"epoch": 5.510086455331412,
"grad_norm": 0.007640014868229628,
"learning_rate": 1.1032183690276754e-07,
"loss": 0.0,
"step": 634
},
{
"epoch": 5.518731988472623,
"grad_norm": 0.020597506314516068,
"learning_rate": 1.0644490409498636e-07,
"loss": 0.0001,
"step": 635
},
{
"epoch": 5.527377521613833,
"grad_norm": 0.004687067586928606,
"learning_rate": 1.0263583507948593e-07,
"loss": 0.0,
"step": 636
},
{
"epoch": 5.536023054755043,
"grad_norm": 0.007369566708803177,
"learning_rate": 9.889473785372555e-08,
"loss": 0.0,
"step": 637
},
{
"epoch": 5.544668587896254,
"grad_norm": 0.014998351223766804,
"learning_rate": 9.522171848797917e-08,
"loss": 0.0,
"step": 638
},
{
"epoch": 5.553314121037464,
"grad_norm": 0.005501555744558573,
"learning_rate": 9.161688112232836e-08,
"loss": 0.0,
"step": 639
},
{
"epoch": 5.561959654178675,
"grad_norm": 0.029769135639071465,
"learning_rate": 8.808032796371019e-08,
"loss": 0.0001,
"step": 640
},
{
"epoch": 5.5706051873198845,
"grad_norm": 0.01651173271238804,
"learning_rate": 8.461215928301819e-08,
"loss": 0.0,
"step": 641
},
{
"epoch": 5.579250720461095,
"grad_norm": 0.0046575190499424934,
"learning_rate": 8.121247341226074e-08,
"loss": 0.0,
"step": 642
},
{
"epoch": 5.587896253602305,
"grad_norm": 0.008450312539935112,
"learning_rate": 7.78813667417716e-08,
"loss": 0.0,
"step": 643
},
{
"epoch": 5.596541786743516,
"grad_norm": 0.007621787954121828,
"learning_rate": 7.46189337174788e-08,
"loss": 0.0,
"step": 644
},
{
"epoch": 5.605187319884726,
"grad_norm": 0.0086066210642457,
"learning_rate": 7.142526683822537e-08,
"loss": 0.0,
"step": 645
},
{
"epoch": 5.6138328530259365,
"grad_norm": 0.012834986671805382,
"learning_rate": 6.830045665314672e-08,
"loss": 0.0,
"step": 646
},
{
"epoch": 5.622478386167147,
"grad_norm": 0.005586333107203245,
"learning_rate": 6.524459175910464e-08,
"loss": 0.0,
"step": 647
},
{
"epoch": 5.631123919308357,
"grad_norm": 0.009992795065045357,
"learning_rate": 6.225775879817431e-08,
"loss": 0.0,
"step": 648
},
{
"epoch": 5.639769452449568,
"grad_norm": 0.006056077778339386,
"learning_rate": 5.934004245518793e-08,
"loss": 0.0,
"step": 649
},
{
"epoch": 5.648414985590778,
"grad_norm": 0.008581247180700302,
"learning_rate": 5.649152545533332e-08,
"loss": 0.0002,
"step": 650
},
{
"epoch": 5.6570605187319885,
"grad_norm": 0.05463126301765442,
"learning_rate": 5.371228856180993e-08,
"loss": 0.0,
"step": 651
},
{
"epoch": 5.665706051873199,
"grad_norm": 0.009801640175282955,
"learning_rate": 5.100241057353683e-08,
"loss": 0.0,
"step": 652
},
{
"epoch": 5.674351585014409,
"grad_norm": 0.008849680423736572,
"learning_rate": 4.8361968322920005e-08,
"loss": 0.0,
"step": 653
},
{
"epoch": 5.68299711815562,
"grad_norm": 0.008909969590604305,
"learning_rate": 4.579103667367385e-08,
"loss": 0.0001,
"step": 654
},
{
"epoch": 5.69164265129683,
"grad_norm": 0.7292889952659607,
"learning_rate": 4.328968851869758e-08,
"loss": 0.0,
"step": 655
},
{
"epoch": 5.70028818443804,
"grad_norm": 0.0066415779292583466,
"learning_rate": 4.0857994778009945e-08,
"loss": 0.0001,
"step": 656
},
{
"epoch": 5.708933717579251,
"grad_norm": 0.03848518431186676,
"learning_rate": 3.84960243967375e-08,
"loss": 0.0002,
"step": 657
},
{
"epoch": 5.717579250720461,
"grad_norm": 0.055817726999521255,
"learning_rate": 3.620384434316005e-08,
"loss": 0.0001,
"step": 658
},
{
"epoch": 5.726224783861672,
"grad_norm": 0.008442546240985394,
"learning_rate": 3.398151960681162e-08,
"loss": 0.0001,
"step": 659
},
{
"epoch": 5.734870317002882,
"grad_norm": 0.03930019959807396,
"learning_rate": 3.182911319663862e-08,
"loss": 0.0,
"step": 660
},
{
"epoch": 5.743515850144092,
"grad_norm": 0.012743629515171051,
"learning_rate": 2.9746686139212612e-08,
"loss": 0.0,
"step": 661
},
{
"epoch": 5.752161383285302,
"grad_norm": 0.008391858078539371,
"learning_rate": 2.7734297477000626e-08,
"loss": 0.0002,
"step": 662
},
{
"epoch": 5.760806916426513,
"grad_norm": 0.06344736367464066,
"learning_rate": 2.5792004266690095e-08,
"loss": 0.0,
"step": 663
},
{
"epoch": 5.769452449567723,
"grad_norm": 0.00729775195941329,
"learning_rate": 2.3919861577572924e-08,
"loss": 0.0,
"step": 664
},
{
"epoch": 5.778097982708934,
"grad_norm": 0.01558664534240961,
"learning_rate": 2.211792248998229e-08,
"loss": 0.0001,
"step": 665
},
{
"epoch": 5.786743515850144,
"grad_norm": 0.016456814482808113,
"learning_rate": 2.038623809378859e-08,
"loss": 0.0,
"step": 666
},
{
"epoch": 5.795389048991354,
"grad_norm": 0.008453302085399628,
"learning_rate": 1.872485748695113e-08,
"loss": 0.0,
"step": 667
},
{
"epoch": 5.804034582132565,
"grad_norm": 0.004515933804214001,
"learning_rate": 1.7133827774125368e-08,
"loss": 0.0001,
"step": 668
},
{
"epoch": 5.812680115273775,
"grad_norm": 0.02883203886449337,
"learning_rate": 1.5613194065327854e-08,
"loss": 0.0001,
"step": 669
},
{
"epoch": 5.821325648414986,
"grad_norm": 0.02576618641614914,
"learning_rate": 1.4162999474657268e-08,
"loss": 0.0,
"step": 670
},
{
"epoch": 5.829971181556196,
"grad_norm": 0.006904300302267075,
"learning_rate": 1.2783285119071232e-08,
"loss": 0.0,
"step": 671
},
{
"epoch": 5.838616714697406,
"grad_norm": 0.015726493671536446,
"learning_rate": 1.1474090117221948e-08,
"loss": 0.0001,
"step": 672
},
{
"epoch": 5.847262247838617,
"grad_norm": 0.032056134194135666,
"learning_rate": 1.023545158834599e-08,
"loss": 0.0,
"step": 673
},
{
"epoch": 5.855907780979827,
"grad_norm": 0.0056428308598697186,
"learning_rate": 9.067404651211808e-09,
"loss": 0.0001,
"step": 674
},
{
"epoch": 5.8645533141210375,
"grad_norm": 0.037972234189510345,
"learning_rate": 7.96998242312469e-09,
"loss": 0.0001,
"step": 675
},
{
"epoch": 5.873198847262248,
"grad_norm": 0.018561307340860367,
"learning_rate": 6.943216018987509e-09,
"loss": 0.0,
"step": 676
},
{
"epoch": 5.881844380403458,
"grad_norm": 0.014271295629441738,
"learning_rate": 5.987134550418106e-09,
"loss": 0.0,
"step": 677
},
{
"epoch": 5.890489913544669,
"grad_norm": 0.006300410255789757,
"learning_rate": 5.101765124925218e-09,
"loss": 0.0001,
"step": 678
},
{
"epoch": 5.899135446685879,
"grad_norm": 0.03615653142333031,
"learning_rate": 4.287132845137709e-09,
"loss": 0.0,
"step": 679
},
{
"epoch": 5.9077809798270895,
"grad_norm": 0.00793964322656393,
"learning_rate": 3.5432608080951392e-09,
"loss": 0.0,
"step": 680
},
{
"epoch": 5.916426512968299,
"grad_norm": 0.011542227119207382,
"learning_rate": 2.870170104591341e-09,
"loss": 0.0001,
"step": 681
},
{
"epoch": 5.92507204610951,
"grad_norm": 0.04817141219973564,
"learning_rate": 2.2678798185771235e-09,
"loss": 0.0,
"step": 682
},
{
"epoch": 5.93371757925072,
"grad_norm": 0.006957252975553274,
"learning_rate": 1.7364070266193135e-09,
"loss": 0.0001,
"step": 683
},
{
"epoch": 5.942363112391931,
"grad_norm": 0.028225049376487732,
"learning_rate": 1.2757667974155896e-09,
"loss": 0.0002,
"step": 684
},
{
"epoch": 5.9510086455331415,
"grad_norm": 0.060419417917728424,
"learning_rate": 8.859721913684339e-10,
"loss": 0.0,
"step": 685
},
{
"epoch": 5.959654178674351,
"grad_norm": 0.005427508614957333,
"learning_rate": 5.670342602148715e-10,
"loss": 0.0001,
"step": 686
},
{
"epoch": 5.968299711815562,
"grad_norm": 0.031979430466890335,
"learning_rate": 3.1896204671144627e-10,
"loss": 0.0001,
"step": 687
},
{
"epoch": 5.976945244956772,
"grad_norm": 0.015397738665342331,
"learning_rate": 1.4176258437970103e-10,
"loss": 0.0001,
"step": 688
},
{
"epoch": 5.985590778097983,
"grad_norm": 0.03583737090229988,
"learning_rate": 3.544089730633804e-11,
"loss": 0.0001,
"step": 689
},
{
"epoch": 5.994236311239193,
"grad_norm": 0.044873710721731186,
"learning_rate": 0.0,
"loss": 0.0,
"step": 690
}
],
"logging_steps": 1,
"max_steps": 690,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 115,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.747175381925888e+18,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}