{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 2262,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008841732979664014,
"grad_norm": 52.62539291381836,
"learning_rate": 4.347826086956522e-07,
"loss": 2.5137,
"step": 1
},
{
"epoch": 0.0017683465959328027,
"grad_norm": 53.25148010253906,
"learning_rate": 8.695652173913044e-07,
"loss": 2.5043,
"step": 2
},
{
"epoch": 0.002652519893899204,
"grad_norm": 51.79323959350586,
"learning_rate": 1.3043478260869566e-06,
"loss": 2.5274,
"step": 3
},
{
"epoch": 0.0035366931918656055,
"grad_norm": 50.44014358520508,
"learning_rate": 1.7391304347826088e-06,
"loss": 2.5222,
"step": 4
},
{
"epoch": 0.004420866489832007,
"grad_norm": 48.93881607055664,
"learning_rate": 2.173913043478261e-06,
"loss": 2.423,
"step": 5
},
{
"epoch": 0.005305039787798408,
"grad_norm": 43.29012680053711,
"learning_rate": 2.6086956521739132e-06,
"loss": 2.2899,
"step": 6
},
{
"epoch": 0.00618921308576481,
"grad_norm": 43.9425048828125,
"learning_rate": 3.043478260869566e-06,
"loss": 2.2777,
"step": 7
},
{
"epoch": 0.007073386383731211,
"grad_norm": 28.587739944458008,
"learning_rate": 3.4782608695652175e-06,
"loss": 1.9677,
"step": 8
},
{
"epoch": 0.007957559681697613,
"grad_norm": 24.07233428955078,
"learning_rate": 3.91304347826087e-06,
"loss": 1.8956,
"step": 9
},
{
"epoch": 0.008841732979664015,
"grad_norm": 10.310393333435059,
"learning_rate": 4.347826086956522e-06,
"loss": 1.5933,
"step": 10
},
{
"epoch": 0.009725906277630416,
"grad_norm": 7.91811466217041,
"learning_rate": 4.782608695652174e-06,
"loss": 1.4909,
"step": 11
},
{
"epoch": 0.010610079575596816,
"grad_norm": 6.773797035217285,
"learning_rate": 5.2173913043478265e-06,
"loss": 1.4643,
"step": 12
},
{
"epoch": 0.011494252873563218,
"grad_norm": 6.0528364181518555,
"learning_rate": 5.652173913043479e-06,
"loss": 1.4289,
"step": 13
},
{
"epoch": 0.01237842617152962,
"grad_norm": 7.164632320404053,
"learning_rate": 6.086956521739132e-06,
"loss": 1.3177,
"step": 14
},
{
"epoch": 0.013262599469496022,
"grad_norm": 5.866113662719727,
"learning_rate": 6.521739130434783e-06,
"loss": 1.2914,
"step": 15
},
{
"epoch": 0.014146772767462422,
"grad_norm": 5.118316173553467,
"learning_rate": 6.956521739130435e-06,
"loss": 1.237,
"step": 16
},
{
"epoch": 0.015030946065428824,
"grad_norm": 3.067793846130371,
"learning_rate": 7.391304347826087e-06,
"loss": 1.1852,
"step": 17
},
{
"epoch": 0.015915119363395226,
"grad_norm": 2.3958816528320312,
"learning_rate": 7.82608695652174e-06,
"loss": 1.1665,
"step": 18
},
{
"epoch": 0.016799292661361626,
"grad_norm": 2.082918643951416,
"learning_rate": 8.260869565217392e-06,
"loss": 1.1404,
"step": 19
},
{
"epoch": 0.01768346595932803,
"grad_norm": 1.6263797283172607,
"learning_rate": 8.695652173913044e-06,
"loss": 1.0926,
"step": 20
},
{
"epoch": 0.01856763925729443,
"grad_norm": 1.2969629764556885,
"learning_rate": 9.130434782608697e-06,
"loss": 1.0941,
"step": 21
},
{
"epoch": 0.019451812555260833,
"grad_norm": 1.166627287864685,
"learning_rate": 9.565217391304349e-06,
"loss": 1.0814,
"step": 22
},
{
"epoch": 0.020335985853227233,
"grad_norm": 0.9910293221473694,
"learning_rate": 1e-05,
"loss": 1.0616,
"step": 23
},
{
"epoch": 0.021220159151193633,
"grad_norm": 0.8890444040298462,
"learning_rate": 1.0434782608695653e-05,
"loss": 1.0431,
"step": 24
},
{
"epoch": 0.022104332449160036,
"grad_norm": 0.6922053694725037,
"learning_rate": 1.0869565217391305e-05,
"loss": 0.9911,
"step": 25
},
{
"epoch": 0.022988505747126436,
"grad_norm": 0.7115573287010193,
"learning_rate": 1.1304347826086957e-05,
"loss": 0.9957,
"step": 26
},
{
"epoch": 0.023872679045092837,
"grad_norm": 0.6320987343788147,
"learning_rate": 1.1739130434782611e-05,
"loss": 0.9821,
"step": 27
},
{
"epoch": 0.02475685234305924,
"grad_norm": 0.6212517619132996,
"learning_rate": 1.2173913043478263e-05,
"loss": 0.9811,
"step": 28
},
{
"epoch": 0.02564102564102564,
"grad_norm": 0.551284670829773,
"learning_rate": 1.2608695652173915e-05,
"loss": 0.9618,
"step": 29
},
{
"epoch": 0.026525198938992044,
"grad_norm": 0.5245449542999268,
"learning_rate": 1.3043478260869566e-05,
"loss": 0.9421,
"step": 30
},
{
"epoch": 0.027409372236958444,
"grad_norm": 0.5216027498245239,
"learning_rate": 1.3478260869565218e-05,
"loss": 0.9537,
"step": 31
},
{
"epoch": 0.028293545534924844,
"grad_norm": 0.49954429268836975,
"learning_rate": 1.391304347826087e-05,
"loss": 0.9249,
"step": 32
},
{
"epoch": 0.029177718832891247,
"grad_norm": 0.5300860404968262,
"learning_rate": 1.4347826086956522e-05,
"loss": 0.9363,
"step": 33
},
{
"epoch": 0.030061892130857647,
"grad_norm": 0.4225921034812927,
"learning_rate": 1.4782608695652174e-05,
"loss": 0.9349,
"step": 34
},
{
"epoch": 0.03094606542882405,
"grad_norm": 0.4705648422241211,
"learning_rate": 1.5217391304347828e-05,
"loss": 0.9294,
"step": 35
},
{
"epoch": 0.03183023872679045,
"grad_norm": 0.40804997086524963,
"learning_rate": 1.565217391304348e-05,
"loss": 0.9093,
"step": 36
},
{
"epoch": 0.032714412024756855,
"grad_norm": 0.4276791512966156,
"learning_rate": 1.6086956521739132e-05,
"loss": 0.9377,
"step": 37
},
{
"epoch": 0.03359858532272325,
"grad_norm": 0.3864081799983978,
"learning_rate": 1.6521739130434785e-05,
"loss": 0.9325,
"step": 38
},
{
"epoch": 0.034482758620689655,
"grad_norm": 0.4173469841480255,
"learning_rate": 1.6956521739130437e-05,
"loss": 0.8892,
"step": 39
},
{
"epoch": 0.03536693191865606,
"grad_norm": 0.37541916966438293,
"learning_rate": 1.739130434782609e-05,
"loss": 0.8816,
"step": 40
},
{
"epoch": 0.036251105216622455,
"grad_norm": 0.4038332402706146,
"learning_rate": 1.782608695652174e-05,
"loss": 0.8932,
"step": 41
},
{
"epoch": 0.03713527851458886,
"grad_norm": 0.4282025992870331,
"learning_rate": 1.8260869565217393e-05,
"loss": 0.8741,
"step": 42
},
{
"epoch": 0.03801945181255526,
"grad_norm": 0.406696617603302,
"learning_rate": 1.8695652173913045e-05,
"loss": 0.914,
"step": 43
},
{
"epoch": 0.038903625110521665,
"grad_norm": 0.37554579973220825,
"learning_rate": 1.9130434782608697e-05,
"loss": 0.8728,
"step": 44
},
{
"epoch": 0.03978779840848806,
"grad_norm": 0.38579878211021423,
"learning_rate": 1.956521739130435e-05,
"loss": 0.8615,
"step": 45
},
{
"epoch": 0.040671971706454466,
"grad_norm": 0.3441755473613739,
"learning_rate": 2e-05,
"loss": 0.892,
"step": 46
},
{
"epoch": 0.04155614500442087,
"grad_norm": 0.36551767587661743,
"learning_rate": 1.999999753905701e-05,
"loss": 0.8863,
"step": 47
},
{
"epoch": 0.042440318302387266,
"grad_norm": 0.3470812439918518,
"learning_rate": 1.9999990156229245e-05,
"loss": 0.89,
"step": 48
},
{
"epoch": 0.04332449160035367,
"grad_norm": 0.3591214120388031,
"learning_rate": 1.9999977851520337e-05,
"loss": 0.8833,
"step": 49
},
{
"epoch": 0.04420866489832007,
"grad_norm": 0.3493013381958008,
"learning_rate": 1.9999960624936347e-05,
"loss": 0.8842,
"step": 50
},
{
"epoch": 0.04509283819628647,
"grad_norm": 0.3526720702648163,
"learning_rate": 1.9999938476485752e-05,
"loss": 0.8588,
"step": 51
},
{
"epoch": 0.04597701149425287,
"grad_norm": 0.34567469358444214,
"learning_rate": 1.9999911406179456e-05,
"loss": 0.8723,
"step": 52
},
{
"epoch": 0.046861184792219276,
"grad_norm": 0.350015789270401,
"learning_rate": 1.9999879414030773e-05,
"loss": 0.854,
"step": 53
},
{
"epoch": 0.04774535809018567,
"grad_norm": 0.36239174008369446,
"learning_rate": 1.999984250005546e-05,
"loss": 0.8526,
"step": 54
},
{
"epoch": 0.04862953138815208,
"grad_norm": 0.3408949077129364,
"learning_rate": 1.9999800664271685e-05,
"loss": 0.8775,
"step": 55
},
{
"epoch": 0.04951370468611848,
"grad_norm": 0.36486580967903137,
"learning_rate": 1.999975390670003e-05,
"loss": 0.8618,
"step": 56
},
{
"epoch": 0.050397877984084884,
"grad_norm": 0.34689685702323914,
"learning_rate": 1.999970222736352e-05,
"loss": 0.8481,
"step": 57
},
{
"epoch": 0.05128205128205128,
"grad_norm": 0.3224916160106659,
"learning_rate": 1.9999645626287583e-05,
"loss": 0.8149,
"step": 58
},
{
"epoch": 0.052166224580017684,
"grad_norm": 0.3246098756790161,
"learning_rate": 1.9999584103500078e-05,
"loss": 0.8305,
"step": 59
},
{
"epoch": 0.05305039787798409,
"grad_norm": 0.34423261880874634,
"learning_rate": 1.999951765903129e-05,
"loss": 0.8503,
"step": 60
},
{
"epoch": 0.053934571175950484,
"grad_norm": 0.3432867228984833,
"learning_rate": 1.999944629291392e-05,
"loss": 0.8246,
"step": 61
},
{
"epoch": 0.05481874447391689,
"grad_norm": 0.3566789925098419,
"learning_rate": 1.999937000518309e-05,
"loss": 0.8493,
"step": 62
},
{
"epoch": 0.05570291777188329,
"grad_norm": 0.3615923523902893,
"learning_rate": 1.9999288795876356e-05,
"loss": 0.8606,
"step": 63
},
{
"epoch": 0.05658709106984969,
"grad_norm": 0.34703564643859863,
"learning_rate": 1.999920266503368e-05,
"loss": 0.834,
"step": 64
},
{
"epoch": 0.05747126436781609,
"grad_norm": 0.3509230613708496,
"learning_rate": 1.999911161269746e-05,
"loss": 0.8226,
"step": 65
},
{
"epoch": 0.058355437665782495,
"grad_norm": 0.39879849553108215,
"learning_rate": 1.9999015638912507e-05,
"loss": 0.8214,
"step": 66
},
{
"epoch": 0.0592396109637489,
"grad_norm": 0.3476121425628662,
"learning_rate": 1.9998914743726062e-05,
"loss": 0.8369,
"step": 67
},
{
"epoch": 0.060123784261715295,
"grad_norm": 0.37484925985336304,
"learning_rate": 1.9998808927187784e-05,
"loss": 0.849,
"step": 68
},
{
"epoch": 0.0610079575596817,
"grad_norm": 0.37329110503196716,
"learning_rate": 1.9998698189349753e-05,
"loss": 0.8424,
"step": 69
},
{
"epoch": 0.0618921308576481,
"grad_norm": 0.3668043613433838,
"learning_rate": 1.9998582530266473e-05,
"loss": 0.841,
"step": 70
},
{
"epoch": 0.0627763041556145,
"grad_norm": 0.3917650878429413,
"learning_rate": 1.999846194999487e-05,
"loss": 0.8633,
"step": 71
},
{
"epoch": 0.0636604774535809,
"grad_norm": 0.39643633365631104,
"learning_rate": 1.9998336448594293e-05,
"loss": 0.8235,
"step": 72
},
{
"epoch": 0.0645446507515473,
"grad_norm": 0.36726894974708557,
"learning_rate": 1.9998206026126512e-05,
"loss": 0.8369,
"step": 73
},
{
"epoch": 0.06542882404951371,
"grad_norm": 0.3852282464504242,
"learning_rate": 1.999807068265572e-05,
"loss": 0.8253,
"step": 74
},
{
"epoch": 0.06631299734748011,
"grad_norm": 0.3984280824661255,
"learning_rate": 1.999793041824853e-05,
"loss": 0.8549,
"step": 75
},
{
"epoch": 0.0671971706454465,
"grad_norm": 0.4089086055755615,
"learning_rate": 1.9997785232973982e-05,
"loss": 0.8144,
"step": 76
},
{
"epoch": 0.0680813439434129,
"grad_norm": 0.32697632908821106,
"learning_rate": 1.999763512690353e-05,
"loss": 0.8089,
"step": 77
},
{
"epoch": 0.06896551724137931,
"grad_norm": 0.40694114565849304,
"learning_rate": 1.9997480100111056e-05,
"loss": 0.8399,
"step": 78
},
{
"epoch": 0.06984969053934571,
"grad_norm": 0.3637621998786926,
"learning_rate": 1.9997320152672865e-05,
"loss": 0.8219,
"step": 79
},
{
"epoch": 0.07073386383731212,
"grad_norm": 0.388299822807312,
"learning_rate": 1.9997155284667678e-05,
"loss": 0.8462,
"step": 80
},
{
"epoch": 0.07161803713527852,
"grad_norm": 0.4305644631385803,
"learning_rate": 1.9996985496176645e-05,
"loss": 0.838,
"step": 81
},
{
"epoch": 0.07250221043324491,
"grad_norm": 0.39962026476860046,
"learning_rate": 1.9996810787283327e-05,
"loss": 0.8091,
"step": 82
},
{
"epoch": 0.07338638373121131,
"grad_norm": 0.34810447692871094,
"learning_rate": 1.999663115807372e-05,
"loss": 0.7929,
"step": 83
},
{
"epoch": 0.07427055702917772,
"grad_norm": 0.3809176981449127,
"learning_rate": 1.9996446608636235e-05,
"loss": 0.8016,
"step": 84
},
{
"epoch": 0.07515473032714412,
"grad_norm": 0.3695034980773926,
"learning_rate": 1.9996257139061705e-05,
"loss": 0.8258,
"step": 85
},
{
"epoch": 0.07603890362511052,
"grad_norm": 0.3905010521411896,
"learning_rate": 1.999606274944338e-05,
"loss": 0.8072,
"step": 86
},
{
"epoch": 0.07692307692307693,
"grad_norm": 0.39127102494239807,
"learning_rate": 1.999586343987694e-05,
"loss": 0.8111,
"step": 87
},
{
"epoch": 0.07780725022104333,
"grad_norm": 0.3551279604434967,
"learning_rate": 1.9995659210460484e-05,
"loss": 0.8305,
"step": 88
},
{
"epoch": 0.07869142351900972,
"grad_norm": 0.3517471253871918,
"learning_rate": 1.999545006129453e-05,
"loss": 0.8106,
"step": 89
},
{
"epoch": 0.07957559681697612,
"grad_norm": 0.3529112935066223,
"learning_rate": 1.9995235992482023e-05,
"loss": 0.7991,
"step": 90
},
{
"epoch": 0.08045977011494253,
"grad_norm": 0.3552139103412628,
"learning_rate": 1.9995017004128317e-05,
"loss": 0.8413,
"step": 91
},
{
"epoch": 0.08134394341290893,
"grad_norm": 0.3667570650577545,
"learning_rate": 1.99947930963412e-05,
"loss": 0.8178,
"step": 92
},
{
"epoch": 0.08222811671087533,
"grad_norm": 0.3715391457080841,
"learning_rate": 1.9994564269230877e-05,
"loss": 0.8549,
"step": 93
},
{
"epoch": 0.08311229000884174,
"grad_norm": 0.3452084958553314,
"learning_rate": 1.9994330522909974e-05,
"loss": 0.787,
"step": 94
},
{
"epoch": 0.08399646330680813,
"grad_norm": 0.3476298153400421,
"learning_rate": 1.9994091857493537e-05,
"loss": 0.7867,
"step": 95
},
{
"epoch": 0.08488063660477453,
"grad_norm": 0.3360370099544525,
"learning_rate": 1.9993848273099038e-05,
"loss": 0.7871,
"step": 96
},
{
"epoch": 0.08576480990274093,
"grad_norm": 0.38479292392730713,
"learning_rate": 1.9993599769846364e-05,
"loss": 0.8187,
"step": 97
},
{
"epoch": 0.08664898320070734,
"grad_norm": 0.38172826170921326,
"learning_rate": 1.9993346347857823e-05,
"loss": 0.8094,
"step": 98
},
{
"epoch": 0.08753315649867374,
"grad_norm": 0.3376418948173523,
"learning_rate": 1.9993088007258148e-05,
"loss": 0.8203,
"step": 99
},
{
"epoch": 0.08841732979664015,
"grad_norm": 0.35402947664260864,
"learning_rate": 1.9992824748174494e-05,
"loss": 0.787,
"step": 100
},
{
"epoch": 0.08930150309460655,
"grad_norm": 0.33833909034729004,
"learning_rate": 1.9992556570736433e-05,
"loss": 0.8066,
"step": 101
},
{
"epoch": 0.09018567639257294,
"grad_norm": 0.34219443798065186,
"learning_rate": 1.9992283475075957e-05,
"loss": 0.802,
"step": 102
},
{
"epoch": 0.09106984969053934,
"grad_norm": 0.34456929564476013,
"learning_rate": 1.999200546132748e-05,
"loss": 0.7941,
"step": 103
},
{
"epoch": 0.09195402298850575,
"grad_norm": 0.35270950198173523,
"learning_rate": 1.9991722529627842e-05,
"loss": 0.8371,
"step": 104
},
{
"epoch": 0.09283819628647215,
"grad_norm": 0.3561088740825653,
"learning_rate": 1.9991434680116292e-05,
"loss": 0.7869,
"step": 105
},
{
"epoch": 0.09372236958443855,
"grad_norm": 0.36413389444351196,
"learning_rate": 1.9991141912934514e-05,
"loss": 0.8342,
"step": 106
},
{
"epoch": 0.09460654288240496,
"grad_norm": 0.3453677296638489,
"learning_rate": 1.99908442282266e-05,
"loss": 0.8195,
"step": 107
},
{
"epoch": 0.09549071618037135,
"grad_norm": 0.34683361649513245,
"learning_rate": 1.9990541626139064e-05,
"loss": 0.8019,
"step": 108
},
{
"epoch": 0.09637488947833775,
"grad_norm": 0.3464336395263672,
"learning_rate": 1.999023410682085e-05,
"loss": 0.7837,
"step": 109
},
{
"epoch": 0.09725906277630415,
"grad_norm": 0.33157557249069214,
"learning_rate": 1.9989921670423312e-05,
"loss": 0.8065,
"step": 110
},
{
"epoch": 0.09814323607427056,
"grad_norm": 0.33555206656455994,
"learning_rate": 1.9989604317100225e-05,
"loss": 0.7689,
"step": 111
},
{
"epoch": 0.09902740937223696,
"grad_norm": 0.33501067757606506,
"learning_rate": 1.9989282047007794e-05,
"loss": 0.7885,
"step": 112
},
{
"epoch": 0.09991158267020336,
"grad_norm": 0.33981025218963623,
"learning_rate": 1.9988954860304627e-05,
"loss": 0.7725,
"step": 113
},
{
"epoch": 0.10079575596816977,
"grad_norm": 0.35524728894233704,
"learning_rate": 1.998862275715177e-05,
"loss": 0.7962,
"step": 114
},
{
"epoch": 0.10167992926613616,
"grad_norm": 0.35333961248397827,
"learning_rate": 1.9988285737712678e-05,
"loss": 0.8066,
"step": 115
},
{
"epoch": 0.10256410256410256,
"grad_norm": 0.32982128858566284,
"learning_rate": 1.9987943802153228e-05,
"loss": 0.7863,
"step": 116
},
{
"epoch": 0.10344827586206896,
"grad_norm": 0.37434104084968567,
"learning_rate": 1.9987596950641713e-05,
"loss": 0.792,
"step": 117
},
{
"epoch": 0.10433244916003537,
"grad_norm": 0.3224402964115143,
"learning_rate": 1.9987245183348857e-05,
"loss": 0.7924,
"step": 118
},
{
"epoch": 0.10521662245800177,
"grad_norm": 0.34271374344825745,
"learning_rate": 1.9986888500447788e-05,
"loss": 0.7964,
"step": 119
},
{
"epoch": 0.10610079575596817,
"grad_norm": 0.34524405002593994,
"learning_rate": 1.9986526902114064e-05,
"loss": 0.8164,
"step": 120
},
{
"epoch": 0.10698496905393456,
"grad_norm": 0.33438634872436523,
"learning_rate": 1.998616038852566e-05,
"loss": 0.7929,
"step": 121
},
{
"epoch": 0.10786914235190097,
"grad_norm": 0.3367898166179657,
"learning_rate": 1.9985788959862975e-05,
"loss": 0.7777,
"step": 122
},
{
"epoch": 0.10875331564986737,
"grad_norm": 0.32885032892227173,
"learning_rate": 1.9985412616308812e-05,
"loss": 0.7708,
"step": 123
},
{
"epoch": 0.10963748894783377,
"grad_norm": 0.3379555642604828,
"learning_rate": 1.998503135804841e-05,
"loss": 0.7791,
"step": 124
},
{
"epoch": 0.11052166224580018,
"grad_norm": 0.3426035940647125,
"learning_rate": 1.9984645185269417e-05,
"loss": 0.7968,
"step": 125
},
{
"epoch": 0.11140583554376658,
"grad_norm": 0.33080124855041504,
"learning_rate": 1.9984254098161905e-05,
"loss": 0.7782,
"step": 126
},
{
"epoch": 0.11229000884173299,
"grad_norm": 0.30623823404312134,
"learning_rate": 1.9983858096918362e-05,
"loss": 0.7911,
"step": 127
},
{
"epoch": 0.11317418213969938,
"grad_norm": 0.33538591861724854,
"learning_rate": 1.9983457181733695e-05,
"loss": 0.7874,
"step": 128
},
{
"epoch": 0.11405835543766578,
"grad_norm": 0.3172617256641388,
"learning_rate": 1.9983051352805226e-05,
"loss": 0.7707,
"step": 129
},
{
"epoch": 0.11494252873563218,
"grad_norm": 0.31174033880233765,
"learning_rate": 1.9982640610332704e-05,
"loss": 0.807,
"step": 130
},
{
"epoch": 0.11582670203359859,
"grad_norm": 0.31524595618247986,
"learning_rate": 1.9982224954518293e-05,
"loss": 0.7719,
"step": 131
},
{
"epoch": 0.11671087533156499,
"grad_norm": 0.3138945400714874,
"learning_rate": 1.998180438556657e-05,
"loss": 0.7761,
"step": 132
},
{
"epoch": 0.11759504862953139,
"grad_norm": 0.3435557782649994,
"learning_rate": 1.9981378903684535e-05,
"loss": 0.7643,
"step": 133
},
{
"epoch": 0.1184792219274978,
"grad_norm": 0.3383101522922516,
"learning_rate": 1.998094850908161e-05,
"loss": 0.8009,
"step": 134
},
{
"epoch": 0.11936339522546419,
"grad_norm": 0.33562222123146057,
"learning_rate": 1.9980513201969624e-05,
"loss": 0.7899,
"step": 135
},
{
"epoch": 0.12024756852343059,
"grad_norm": 0.3479905128479004,
"learning_rate": 1.9980072982562836e-05,
"loss": 0.7829,
"step": 136
},
{
"epoch": 0.121131741821397,
"grad_norm": 0.3248721659183502,
"learning_rate": 1.997962785107791e-05,
"loss": 0.7869,
"step": 137
},
{
"epoch": 0.1220159151193634,
"grad_norm": 0.4377537667751312,
"learning_rate": 1.997917780773394e-05,
"loss": 0.8274,
"step": 138
},
{
"epoch": 0.1229000884173298,
"grad_norm": 0.37080663442611694,
"learning_rate": 1.997872285275243e-05,
"loss": 0.7703,
"step": 139
},
{
"epoch": 0.1237842617152962,
"grad_norm": 0.32011935114860535,
"learning_rate": 1.9978262986357305e-05,
"loss": 0.7581,
"step": 140
},
{
"epoch": 0.1246684350132626,
"grad_norm": 0.37553200125694275,
"learning_rate": 1.9977798208774907e-05,
"loss": 0.8024,
"step": 141
},
{
"epoch": 0.125552608311229,
"grad_norm": 0.31366807222366333,
"learning_rate": 1.9977328520233988e-05,
"loss": 0.7945,
"step": 142
},
{
"epoch": 0.12643678160919541,
"grad_norm": 0.3468271791934967,
"learning_rate": 1.9976853920965733e-05,
"loss": 0.7835,
"step": 143
},
{
"epoch": 0.1273209549071618,
"grad_norm": 0.34685733914375305,
"learning_rate": 1.9976374411203724e-05,
"loss": 0.8237,
"step": 144
},
{
"epoch": 0.1282051282051282,
"grad_norm": 0.351948618888855,
"learning_rate": 1.9975889991183982e-05,
"loss": 0.7799,
"step": 145
},
{
"epoch": 0.1290893015030946,
"grad_norm": 0.3291540741920471,
"learning_rate": 1.997540066114492e-05,
"loss": 0.7849,
"step": 146
},
{
"epoch": 0.129973474801061,
"grad_norm": 0.3309676945209503,
"learning_rate": 1.9974906421327392e-05,
"loss": 0.7456,
"step": 147
},
{
"epoch": 0.13085764809902742,
"grad_norm": 0.33874332904815674,
"learning_rate": 1.997440727197465e-05,
"loss": 0.8118,
"step": 148
},
{
"epoch": 0.1317418213969938,
"grad_norm": 0.32740166783332825,
"learning_rate": 1.9973903213332373e-05,
"loss": 0.7631,
"step": 149
},
{
"epoch": 0.13262599469496023,
"grad_norm": 0.3246428072452545,
"learning_rate": 1.9973394245648653e-05,
"loss": 0.7801,
"step": 150
},
{
"epoch": 0.13351016799292661,
"grad_norm": 0.31797605752944946,
"learning_rate": 1.9972880369173996e-05,
"loss": 0.7763,
"step": 151
},
{
"epoch": 0.134394341290893,
"grad_norm": 0.33574724197387695,
"learning_rate": 1.9972361584161322e-05,
"loss": 0.7746,
"step": 152
},
{
"epoch": 0.13527851458885942,
"grad_norm": 0.32294753193855286,
"learning_rate": 1.9971837890865983e-05,
"loss": 0.769,
"step": 153
},
{
"epoch": 0.1361626878868258,
"grad_norm": 0.33689793944358826,
"learning_rate": 1.997130928954573e-05,
"loss": 0.7729,
"step": 154
},
{
"epoch": 0.13704686118479223,
"grad_norm": 0.3352261185646057,
"learning_rate": 1.9970775780460725e-05,
"loss": 0.7503,
"step": 155
},
{
"epoch": 0.13793103448275862,
"grad_norm": 0.3127923309803009,
"learning_rate": 1.9970237363873565e-05,
"loss": 0.7865,
"step": 156
},
{
"epoch": 0.138815207780725,
"grad_norm": 0.35131433606147766,
"learning_rate": 1.9969694040049252e-05,
"loss": 0.7903,
"step": 157
},
{
"epoch": 0.13969938107869143,
"grad_norm": 0.341541588306427,
"learning_rate": 1.9969145809255205e-05,
"loss": 0.7713,
"step": 158
},
{
"epoch": 0.14058355437665782,
"grad_norm": 0.3427078425884247,
"learning_rate": 1.996859267176125e-05,
"loss": 0.7995,
"step": 159
},
{
"epoch": 0.14146772767462423,
"grad_norm": 0.3513321876525879,
"learning_rate": 1.9968034627839637e-05,
"loss": 0.8007,
"step": 160
},
{
"epoch": 0.14235190097259062,
"grad_norm": 0.3461512327194214,
"learning_rate": 1.9967471677765036e-05,
"loss": 0.7799,
"step": 161
},
{
"epoch": 0.14323607427055704,
"grad_norm": 0.362687885761261,
"learning_rate": 1.9966903821814515e-05,
"loss": 0.7998,
"step": 162
},
{
"epoch": 0.14412024756852343,
"grad_norm": 0.3743812143802643,
"learning_rate": 1.9966331060267574e-05,
"loss": 0.7922,
"step": 163
},
{
"epoch": 0.14500442086648982,
"grad_norm": 0.36852502822875977,
"learning_rate": 1.9965753393406112e-05,
"loss": 0.8072,
"step": 164
},
{
"epoch": 0.14588859416445624,
"grad_norm": 0.355268269777298,
"learning_rate": 1.9965170821514458e-05,
"loss": 0.8088,
"step": 165
},
{
"epoch": 0.14677276746242263,
"grad_norm": 0.32608094811439514,
"learning_rate": 1.9964583344879343e-05,
"loss": 0.7734,
"step": 166
},
{
"epoch": 0.14765694076038904,
"grad_norm": 0.3217647671699524,
"learning_rate": 1.9963990963789917e-05,
"loss": 0.7832,
"step": 167
},
{
"epoch": 0.14854111405835543,
"grad_norm": 0.3335796296596527,
"learning_rate": 1.9963393678537742e-05,
"loss": 0.7734,
"step": 168
},
{
"epoch": 0.14942528735632185,
"grad_norm": 0.3261146545410156,
"learning_rate": 1.9962791489416796e-05,
"loss": 0.7682,
"step": 169
},
{
"epoch": 0.15030946065428824,
"grad_norm": 0.3570301830768585,
"learning_rate": 1.996218439672347e-05,
"loss": 0.7657,
"step": 170
},
{
"epoch": 0.15119363395225463,
"grad_norm": 0.32962653040885925,
"learning_rate": 1.9961572400756572e-05,
"loss": 0.7761,
"step": 171
},
{
"epoch": 0.15207780725022105,
"grad_norm": 0.32405391335487366,
"learning_rate": 1.9960955501817308e-05,
"loss": 0.7592,
"step": 172
},
{
"epoch": 0.15296198054818744,
"grad_norm": 0.32745420932769775,
"learning_rate": 1.996033370020932e-05,
"loss": 0.7625,
"step": 173
},
{
"epoch": 0.15384615384615385,
"grad_norm": 0.34922829270362854,
"learning_rate": 1.9959706996238646e-05,
"loss": 0.7655,
"step": 174
},
{
"epoch": 0.15473032714412024,
"grad_norm": 0.32486554980278015,
"learning_rate": 1.9959075390213745e-05,
"loss": 0.7711,
"step": 175
},
{
"epoch": 0.15561450044208666,
"grad_norm": 0.30626025795936584,
"learning_rate": 1.9958438882445486e-05,
"loss": 0.7593,
"step": 176
},
{
"epoch": 0.15649867374005305,
"grad_norm": 0.37638500332832336,
"learning_rate": 1.9957797473247153e-05,
"loss": 0.7867,
"step": 177
},
{
"epoch": 0.15738284703801944,
"grad_norm": 0.29693952202796936,
"learning_rate": 1.9957151162934433e-05,
"loss": 0.7873,
"step": 178
},
{
"epoch": 0.15826702033598586,
"grad_norm": 0.350038081407547,
"learning_rate": 1.995649995182544e-05,
"loss": 0.7522,
"step": 179
},
{
"epoch": 0.15915119363395225,
"grad_norm": 0.3082639276981354,
"learning_rate": 1.995584384024069e-05,
"loss": 0.7713,
"step": 180
},
{
"epoch": 0.16003536693191867,
"grad_norm": 0.3037581741809845,
"learning_rate": 1.9955182828503108e-05,
"loss": 0.7556,
"step": 181
},
{
"epoch": 0.16091954022988506,
"grad_norm": 0.31596839427948,
"learning_rate": 1.9954516916938045e-05,
"loss": 0.745,
"step": 182
},
{
"epoch": 0.16180371352785147,
"grad_norm": 0.29814088344573975,
"learning_rate": 1.9953846105873255e-05,
"loss": 0.7577,
"step": 183
},
{
"epoch": 0.16268788682581786,
"grad_norm": 0.31259554624557495,
"learning_rate": 1.9953170395638897e-05,
"loss": 0.7761,
"step": 184
},
{
"epoch": 0.16357206012378425,
"grad_norm": 0.3122078478336334,
"learning_rate": 1.995248978656755e-05,
"loss": 0.763,
"step": 185
},
{
"epoch": 0.16445623342175067,
"grad_norm": 0.32268399000167847,
"learning_rate": 1.995180427899421e-05,
"loss": 0.769,
"step": 186
},
{
"epoch": 0.16534040671971706,
"grad_norm": 0.34449225664138794,
"learning_rate": 1.9951113873256262e-05,
"loss": 0.7765,
"step": 187
},
{
"epoch": 0.16622458001768348,
"grad_norm": 0.34683218598365784,
"learning_rate": 1.9950418569693527e-05,
"loss": 0.7767,
"step": 188
},
{
"epoch": 0.16710875331564987,
"grad_norm": 0.3186188340187073,
"learning_rate": 1.994971836864822e-05,
"loss": 0.7467,
"step": 189
},
{
"epoch": 0.16799292661361626,
"grad_norm": 0.3304593861103058,
"learning_rate": 1.9949013270464976e-05,
"loss": 0.7719,
"step": 190
},
{
"epoch": 0.16887709991158267,
"grad_norm": 0.32111820578575134,
"learning_rate": 1.9948303275490834e-05,
"loss": 0.7721,
"step": 191
},
{
"epoch": 0.16976127320954906,
"grad_norm": 0.3233659565448761,
"learning_rate": 1.9947588384075244e-05,
"loss": 0.7709,
"step": 192
},
{
"epoch": 0.17064544650751548,
"grad_norm": 0.31537777185440063,
"learning_rate": 1.994686859657007e-05,
"loss": 0.768,
"step": 193
},
{
"epoch": 0.17152961980548187,
"grad_norm": 0.32929784059524536,
"learning_rate": 1.9946143913329582e-05,
"loss": 0.7687,
"step": 194
},
{
"epoch": 0.1724137931034483,
"grad_norm": 0.3521505892276764,
"learning_rate": 1.994541433471046e-05,
"loss": 0.7678,
"step": 195
},
{
"epoch": 0.17329796640141468,
"grad_norm": 0.3195169270038605,
"learning_rate": 1.9944679861071795e-05,
"loss": 0.7745,
"step": 196
},
{
"epoch": 0.17418213969938107,
"grad_norm": 0.33620986342430115,
"learning_rate": 1.9943940492775088e-05,
"loss": 0.7623,
"step": 197
},
{
"epoch": 0.17506631299734748,
"grad_norm": 0.33872082829475403,
"learning_rate": 1.9943196230184244e-05,
"loss": 0.7364,
"step": 198
},
{
"epoch": 0.17595048629531387,
"grad_norm": 0.31908750534057617,
"learning_rate": 1.9942447073665584e-05,
"loss": 0.7574,
"step": 199
},
{
"epoch": 0.1768346595932803,
"grad_norm": 0.32828450202941895,
"learning_rate": 1.9941693023587835e-05,
"loss": 0.7298,
"step": 200
},
{
"epoch": 0.17771883289124668,
"grad_norm": 0.3618651032447815,
"learning_rate": 1.994093408032213e-05,
"loss": 0.7628,
"step": 201
},
{
"epoch": 0.1786030061892131,
"grad_norm": 0.3187827467918396,
"learning_rate": 1.994017024424201e-05,
"loss": 0.7637,
"step": 202
},
{
"epoch": 0.1794871794871795,
"grad_norm": 0.3086111545562744,
"learning_rate": 1.993940151572343e-05,
"loss": 0.7774,
"step": 203
},
{
"epoch": 0.18037135278514588,
"grad_norm": 0.32847389578819275,
"learning_rate": 1.993862789514475e-05,
"loss": 0.7695,
"step": 204
},
{
"epoch": 0.1812555260831123,
"grad_norm": 0.30278027057647705,
"learning_rate": 1.993784938288673e-05,
"loss": 0.7551,
"step": 205
},
{
"epoch": 0.18213969938107868,
"grad_norm": 0.36540716886520386,
"learning_rate": 1.9937065979332555e-05,
"loss": 0.7483,
"step": 206
},
{
"epoch": 0.1830238726790451,
"grad_norm": 0.32073548436164856,
"learning_rate": 1.9936277684867803e-05,
"loss": 0.8006,
"step": 207
},
{
"epoch": 0.1839080459770115,
"grad_norm": 0.31449392437934875,
"learning_rate": 1.9935484499880457e-05,
"loss": 0.7635,
"step": 208
},
{
"epoch": 0.1847922192749779,
"grad_norm": 0.35007962584495544,
"learning_rate": 1.9934686424760927e-05,
"loss": 0.7557,
"step": 209
},
{
"epoch": 0.1856763925729443,
"grad_norm": 0.3197214603424072,
"learning_rate": 1.9933883459902003e-05,
"loss": 0.7362,
"step": 210
},
{
"epoch": 0.1865605658709107,
"grad_norm": 0.3293899893760681,
"learning_rate": 1.9933075605698904e-05,
"loss": 0.7898,
"step": 211
},
{
"epoch": 0.1874447391688771,
"grad_norm": 0.31481003761291504,
"learning_rate": 1.9932262862549244e-05,
"loss": 0.7562,
"step": 212
},
{
"epoch": 0.1883289124668435,
"grad_norm": 0.3162625730037689,
"learning_rate": 1.9931445230853046e-05,
"loss": 0.7517,
"step": 213
},
{
"epoch": 0.1892130857648099,
"grad_norm": 0.3140109181404114,
"learning_rate": 1.993062271101274e-05,
"loss": 0.789,
"step": 214
},
{
"epoch": 0.1900972590627763,
"grad_norm": 0.3208922743797302,
"learning_rate": 1.9929795303433158e-05,
"loss": 0.7554,
"step": 215
},
{
"epoch": 0.1909814323607427,
"grad_norm": 0.31978777050971985,
"learning_rate": 1.992896300852154e-05,
"loss": 0.7588,
"step": 216
},
{
"epoch": 0.1918656056587091,
"grad_norm": 0.31362977623939514,
"learning_rate": 1.992812582668754e-05,
"loss": 0.7661,
"step": 217
},
{
"epoch": 0.1927497789566755,
"grad_norm": 0.3093729019165039,
"learning_rate": 1.99272837583432e-05,
"loss": 0.7597,
"step": 218
},
{
"epoch": 0.19363395225464192,
"grad_norm": 0.30044859647750854,
"learning_rate": 1.992643680390298e-05,
"loss": 0.7425,
"step": 219
},
{
"epoch": 0.1945181255526083,
"grad_norm": 0.31671592593193054,
"learning_rate": 1.992558496378374e-05,
"loss": 0.765,
"step": 220
},
{
"epoch": 0.19540229885057472,
"grad_norm": 0.31895530223846436,
"learning_rate": 1.992472823840475e-05,
"loss": 0.7499,
"step": 221
},
{
"epoch": 0.1962864721485411,
"grad_norm": 0.29004424810409546,
"learning_rate": 1.992386662818768e-05,
"loss": 0.7243,
"step": 222
},
{
"epoch": 0.1971706454465075,
"grad_norm": 0.3262535631656647,
"learning_rate": 1.99230001335566e-05,
"loss": 0.7615,
"step": 223
},
{
"epoch": 0.19805481874447392,
"grad_norm": 0.30386170744895935,
"learning_rate": 1.992212875493799e-05,
"loss": 0.7352,
"step": 224
},
{
"epoch": 0.1989389920424403,
"grad_norm": 0.32248827815055847,
"learning_rate": 1.9921252492760733e-05,
"loss": 0.7655,
"step": 225
},
{
"epoch": 0.19982316534040673,
"grad_norm": 0.36855050921440125,
"learning_rate": 1.992037134745612e-05,
"loss": 0.769,
"step": 226
},
{
"epoch": 0.20070733863837312,
"grad_norm": 0.34129512310028076,
"learning_rate": 1.991948531945784e-05,
"loss": 0.7738,
"step": 227
},
{
"epoch": 0.20159151193633953,
"grad_norm": 0.2973238527774811,
"learning_rate": 1.9918594409201976e-05,
"loss": 0.7213,
"step": 228
},
{
"epoch": 0.20247568523430592,
"grad_norm": 0.41102999448776245,
"learning_rate": 1.991769861712703e-05,
"loss": 0.7642,
"step": 229
},
{
"epoch": 0.20335985853227231,
"grad_norm": 0.32703250646591187,
"learning_rate": 1.9916797943673908e-05,
"loss": 0.7629,
"step": 230
},
{
"epoch": 0.20424403183023873,
"grad_norm": 0.32522276043891907,
"learning_rate": 1.9915892389285902e-05,
"loss": 0.7702,
"step": 231
},
{
"epoch": 0.20512820512820512,
"grad_norm": 0.37511447072029114,
"learning_rate": 1.9914981954408715e-05,
"loss": 0.7443,
"step": 232
},
{
"epoch": 0.20601237842617154,
"grad_norm": 0.330055832862854,
"learning_rate": 1.9914066639490458e-05,
"loss": 0.7492,
"step": 233
},
{
"epoch": 0.20689655172413793,
"grad_norm": 0.3460211753845215,
"learning_rate": 1.991314644498164e-05,
"loss": 0.7819,
"step": 234
},
{
"epoch": 0.20778072502210435,
"grad_norm": 0.33654674887657166,
"learning_rate": 1.991222137133516e-05,
"loss": 0.7523,
"step": 235
},
{
"epoch": 0.20866489832007074,
"grad_norm": 0.31626278162002563,
"learning_rate": 1.991129141900634e-05,
"loss": 0.7612,
"step": 236
},
{
"epoch": 0.20954907161803712,
"grad_norm": 0.3241477310657501,
"learning_rate": 1.9910356588452885e-05,
"loss": 0.7567,
"step": 237
},
{
"epoch": 0.21043324491600354,
"grad_norm": 0.31327566504478455,
"learning_rate": 1.990941688013491e-05,
"loss": 0.7607,
"step": 238
},
{
"epoch": 0.21131741821396993,
"grad_norm": 0.30407893657684326,
"learning_rate": 1.990847229451493e-05,
"loss": 0.753,
"step": 239
},
{
"epoch": 0.21220159151193635,
"grad_norm": 0.3187197148799896,
"learning_rate": 1.9907522832057858e-05,
"loss": 0.78,
"step": 240
},
{
"epoch": 0.21308576480990274,
"grad_norm": 0.31609421968460083,
"learning_rate": 1.9906568493231008e-05,
"loss": 0.7483,
"step": 241
},
{
"epoch": 0.21396993810786913,
"grad_norm": 0.3408105969429016,
"learning_rate": 1.9905609278504093e-05,
"loss": 0.7532,
"step": 242
},
{
"epoch": 0.21485411140583555,
"grad_norm": 0.326735258102417,
"learning_rate": 1.9904645188349233e-05,
"loss": 0.7508,
"step": 243
},
{
"epoch": 0.21573828470380194,
"grad_norm": 0.300815224647522,
"learning_rate": 1.9903676223240942e-05,
"loss": 0.7357,
"step": 244
},
{
"epoch": 0.21662245800176835,
"grad_norm": 0.31742265820503235,
"learning_rate": 1.9902702383656127e-05,
"loss": 0.7286,
"step": 245
},
{
"epoch": 0.21750663129973474,
"grad_norm": 0.2978701591491699,
"learning_rate": 1.9901723670074106e-05,
"loss": 0.7274,
"step": 246
},
{
"epoch": 0.21839080459770116,
"grad_norm": 0.3026012182235718,
"learning_rate": 1.9900740082976588e-05,
"loss": 0.7562,
"step": 247
},
{
"epoch": 0.21927497789566755,
"grad_norm": 0.3121882677078247,
"learning_rate": 1.989975162284768e-05,
"loss": 0.7468,
"step": 248
},
{
"epoch": 0.22015915119363394,
"grad_norm": 0.3312043249607086,
"learning_rate": 1.9898758290173906e-05,
"loss": 0.745,
"step": 249
},
{
"epoch": 0.22104332449160036,
"grad_norm": 0.30274179577827454,
"learning_rate": 1.9897760085444156e-05,
"loss": 0.7563,
"step": 250
},
{
"epoch": 0.22192749778956675,
"grad_norm": 0.3054564297199249,
"learning_rate": 1.989675700914974e-05,
"loss": 0.7397,
"step": 251
},
{
"epoch": 0.22281167108753316,
"grad_norm": 0.3345232903957367,
"learning_rate": 1.9895749061784366e-05,
"loss": 0.7305,
"step": 252
},
{
"epoch": 0.22369584438549955,
"grad_norm": 0.304670512676239,
"learning_rate": 1.9894736243844126e-05,
"loss": 0.777,
"step": 253
},
{
"epoch": 0.22458001768346597,
"grad_norm": 0.3238028585910797,
"learning_rate": 1.9893718555827524e-05,
"loss": 0.7801,
"step": 254
},
{
"epoch": 0.22546419098143236,
"grad_norm": 0.2998192608356476,
"learning_rate": 1.9892695998235453e-05,
"loss": 0.7631,
"step": 255
},
{
"epoch": 0.22634836427939875,
"grad_norm": 0.34796053171157837,
"learning_rate": 1.9891668571571203e-05,
"loss": 0.7292,
"step": 256
},
{
"epoch": 0.22723253757736517,
"grad_norm": 0.30835282802581787,
"learning_rate": 1.989063627634046e-05,
"loss": 0.7359,
"step": 257
},
{
"epoch": 0.22811671087533156,
"grad_norm": 0.3030673563480377,
"learning_rate": 1.988959911305131e-05,
"loss": 0.7332,
"step": 258
},
{
"epoch": 0.22900088417329797,
"grad_norm": 0.34073376655578613,
"learning_rate": 1.9888557082214235e-05,
"loss": 0.7394,
"step": 259
},
{
"epoch": 0.22988505747126436,
"grad_norm": 0.3141021132469177,
"learning_rate": 1.9887510184342108e-05,
"loss": 0.7583,
"step": 260
},
{
"epoch": 0.23076923076923078,
"grad_norm": 0.30319637060165405,
"learning_rate": 1.9886458419950203e-05,
"loss": 0.7608,
"step": 261
},
{
"epoch": 0.23165340406719717,
"grad_norm": 0.3428678810596466,
"learning_rate": 1.988540178955618e-05,
"loss": 0.7291,
"step": 262
},
{
"epoch": 0.23253757736516356,
"grad_norm": 0.31113526225090027,
"learning_rate": 1.9884340293680104e-05,
"loss": 0.743,
"step": 263
},
{
"epoch": 0.23342175066312998,
"grad_norm": 0.31108230352401733,
"learning_rate": 1.9883273932844435e-05,
"loss": 0.7522,
"step": 264
},
{
"epoch": 0.23430592396109637,
"grad_norm": 0.31700071692466736,
"learning_rate": 1.9882202707574017e-05,
"loss": 0.7353,
"step": 265
},
{
"epoch": 0.23519009725906279,
"grad_norm": 0.32041996717453003,
"learning_rate": 1.98811266183961e-05,
"loss": 0.7513,
"step": 266
},
{
"epoch": 0.23607427055702918,
"grad_norm": 0.3326590061187744,
"learning_rate": 1.9880045665840324e-05,
"loss": 0.7387,
"step": 267
},
{
"epoch": 0.2369584438549956,
"grad_norm": 0.33623456954956055,
"learning_rate": 1.9878959850438716e-05,
"loss": 0.7379,
"step": 268
},
{
"epoch": 0.23784261715296198,
"grad_norm": 0.33834192156791687,
"learning_rate": 1.987786917272571e-05,
"loss": 0.7534,
"step": 269
},
{
"epoch": 0.23872679045092837,
"grad_norm": 0.318280965089798,
"learning_rate": 1.9876773633238113e-05,
"loss": 0.7456,
"step": 270
},
{
"epoch": 0.2396109637488948,
"grad_norm": 0.3471197783946991,
"learning_rate": 1.9875673232515145e-05,
"loss": 0.7353,
"step": 271
},
{
"epoch": 0.24049513704686118,
"grad_norm": 0.33142876625061035,
"learning_rate": 1.987456797109841e-05,
"loss": 0.7561,
"step": 272
},
{
"epoch": 0.2413793103448276,
"grad_norm": 0.3206813633441925,
"learning_rate": 1.9873457849531906e-05,
"loss": 0.7072,
"step": 273
},
{
"epoch": 0.242263483642794,
"grad_norm": 0.3138003945350647,
"learning_rate": 1.987234286836202e-05,
"loss": 0.7498,
"step": 274
},
{
"epoch": 0.24314765694076038,
"grad_norm": 0.3024766147136688,
"learning_rate": 1.9871223028137533e-05,
"loss": 0.7554,
"step": 275
},
{
"epoch": 0.2440318302387268,
"grad_norm": 0.30262595415115356,
"learning_rate": 1.9870098329409618e-05,
"loss": 0.7448,
"step": 276
},
{
"epoch": 0.24491600353669318,
"grad_norm": 0.3016452193260193,
"learning_rate": 1.986896877273184e-05,
"loss": 0.7336,
"step": 277
},
{
"epoch": 0.2458001768346596,
"grad_norm": 0.3071109652519226,
"learning_rate": 1.9867834358660153e-05,
"loss": 0.7123,
"step": 278
},
{
"epoch": 0.246684350132626,
"grad_norm": 0.3270327150821686,
"learning_rate": 1.9866695087752902e-05,
"loss": 0.7348,
"step": 279
},
{
"epoch": 0.2475685234305924,
"grad_norm": 0.3012430667877197,
"learning_rate": 1.9865550960570824e-05,
"loss": 0.7505,
"step": 280
},
{
"epoch": 0.2484526967285588,
"grad_norm": 0.32532867789268494,
"learning_rate": 1.986440197767705e-05,
"loss": 0.7576,
"step": 281
},
{
"epoch": 0.2493368700265252,
"grad_norm": 0.3224872350692749,
"learning_rate": 1.986324813963709e-05,
"loss": 0.7425,
"step": 282
},
{
"epoch": 0.2502210433244916,
"grad_norm": 0.316292941570282,
"learning_rate": 1.9862089447018847e-05,
"loss": 0.7251,
"step": 283
},
{
"epoch": 0.251105216622458,
"grad_norm": 0.35289883613586426,
"learning_rate": 1.986092590039262e-05,
"loss": 0.7533,
"step": 284
},
{
"epoch": 0.2519893899204244,
"grad_norm": 0.32891616225242615,
"learning_rate": 1.98597575003311e-05,
"loss": 0.7365,
"step": 285
},
{
"epoch": 0.25287356321839083,
"grad_norm": 0.3328152298927307,
"learning_rate": 1.9858584247409355e-05,
"loss": 0.7439,
"step": 286
},
{
"epoch": 0.2537577365163572,
"grad_norm": 0.3570663034915924,
"learning_rate": 1.985740614220484e-05,
"loss": 0.7148,
"step": 287
},
{
"epoch": 0.2546419098143236,
"grad_norm": 0.35808566212654114,
"learning_rate": 1.9856223185297416e-05,
"loss": 0.7366,
"step": 288
},
{
"epoch": 0.25552608311229,
"grad_norm": 0.3262200355529785,
"learning_rate": 1.9855035377269313e-05,
"loss": 0.7223,
"step": 289
},
{
"epoch": 0.2564102564102564,
"grad_norm": 0.3300662040710449,
"learning_rate": 1.9853842718705163e-05,
"loss": 0.7156,
"step": 290
},
{
"epoch": 0.2572944297082228,
"grad_norm": 0.33936625719070435,
"learning_rate": 1.985264521019197e-05,
"loss": 0.7416,
"step": 291
},
{
"epoch": 0.2581786030061892,
"grad_norm": 0.31461718678474426,
"learning_rate": 1.9851442852319144e-05,
"loss": 0.73,
"step": 292
},
{
"epoch": 0.25906277630415564,
"grad_norm": 0.3337997794151306,
"learning_rate": 1.9850235645678468e-05,
"loss": 0.741,
"step": 293
},
{
"epoch": 0.259946949602122,
"grad_norm": 0.3520178198814392,
"learning_rate": 1.9849023590864114e-05,
"loss": 0.7555,
"step": 294
},
{
"epoch": 0.2608311229000884,
"grad_norm": 0.300979346036911,
"learning_rate": 1.9847806688472638e-05,
"loss": 0.7329,
"step": 295
},
{
"epoch": 0.26171529619805484,
"grad_norm": 0.34813445806503296,
"learning_rate": 1.9846584939102996e-05,
"loss": 0.7541,
"step": 296
},
{
"epoch": 0.2625994694960212,
"grad_norm": 0.30120474100112915,
"learning_rate": 1.9845358343356508e-05,
"loss": 0.7132,
"step": 297
},
{
"epoch": 0.2634836427939876,
"grad_norm": 0.3190529942512512,
"learning_rate": 1.9844126901836897e-05,
"loss": 0.7552,
"step": 298
},
{
"epoch": 0.26436781609195403,
"grad_norm": 0.33429914712905884,
"learning_rate": 1.9842890615150267e-05,
"loss": 0.739,
"step": 299
},
{
"epoch": 0.26525198938992045,
"grad_norm": 0.3150491416454315,
"learning_rate": 1.9841649483905092e-05,
"loss": 0.7557,
"step": 300
},
{
"epoch": 0.2661361626878868,
"grad_norm": 0.3498631417751312,
"learning_rate": 1.9840403508712256e-05,
"loss": 0.735,
"step": 301
},
{
"epoch": 0.26702033598585323,
"grad_norm": 0.30264052748680115,
"learning_rate": 1.9839152690185007e-05,
"loss": 0.7071,
"step": 302
},
{
"epoch": 0.26790450928381965,
"grad_norm": 0.31947246193885803,
"learning_rate": 1.9837897028938986e-05,
"loss": 0.7276,
"step": 303
},
{
"epoch": 0.268788682581786,
"grad_norm": 0.3197843134403229,
"learning_rate": 1.9836636525592212e-05,
"loss": 0.7448,
"step": 304
},
{
"epoch": 0.2696728558797524,
"grad_norm": 0.3086130619049072,
"learning_rate": 1.9835371180765092e-05,
"loss": 0.7234,
"step": 305
},
{
"epoch": 0.27055702917771884,
"grad_norm": 0.3283665180206299,
"learning_rate": 1.983410099508042e-05,
"loss": 0.7372,
"step": 306
},
{
"epoch": 0.27144120247568526,
"grad_norm": 0.3397913873195648,
"learning_rate": 1.9832825969163358e-05,
"loss": 0.7387,
"step": 307
},
{
"epoch": 0.2723253757736516,
"grad_norm": 0.32947415113449097,
"learning_rate": 1.9831546103641465e-05,
"loss": 0.737,
"step": 308
},
{
"epoch": 0.27320954907161804,
"grad_norm": 0.30034953355789185,
"learning_rate": 1.983026139914467e-05,
"loss": 0.7664,
"step": 309
},
{
"epoch": 0.27409372236958446,
"grad_norm": 0.30004483461380005,
"learning_rate": 1.98289718563053e-05,
"loss": 0.7437,
"step": 310
},
{
"epoch": 0.2749778956675508,
"grad_norm": 0.33071067929267883,
"learning_rate": 1.9827677475758044e-05,
"loss": 0.7302,
"step": 311
},
{
"epoch": 0.27586206896551724,
"grad_norm": 0.31142210960388184,
"learning_rate": 1.9826378258139983e-05,
"loss": 0.7497,
"step": 312
},
{
"epoch": 0.27674624226348365,
"grad_norm": 0.2842351794242859,
"learning_rate": 1.9825074204090583e-05,
"loss": 0.7079,
"step": 313
},
{
"epoch": 0.27763041556145,
"grad_norm": 0.3006134033203125,
"learning_rate": 1.982376531425168e-05,
"loss": 0.7454,
"step": 314
},
{
"epoch": 0.27851458885941643,
"grad_norm": 0.3104064464569092,
"learning_rate": 1.982245158926749e-05,
"loss": 0.7319,
"step": 315
},
{
"epoch": 0.27939876215738285,
"grad_norm": 0.3597480356693268,
"learning_rate": 1.9821133029784622e-05,
"loss": 0.7321,
"step": 316
},
{
"epoch": 0.28028293545534927,
"grad_norm": 0.31946197152137756,
"learning_rate": 1.9819809636452053e-05,
"loss": 0.7227,
"step": 317
},
{
"epoch": 0.28116710875331563,
"grad_norm": 0.3047025501728058,
"learning_rate": 1.981848140992114e-05,
"loss": 0.7316,
"step": 318
},
{
"epoch": 0.28205128205128205,
"grad_norm": 0.32287970185279846,
"learning_rate": 1.9817148350845622e-05,
"loss": 0.7344,
"step": 319
},
{
"epoch": 0.28293545534924847,
"grad_norm": 0.30726414918899536,
"learning_rate": 1.981581045988162e-05,
"loss": 0.7482,
"step": 320
},
{
"epoch": 0.2838196286472148,
"grad_norm": 0.3184090256690979,
"learning_rate": 1.9814467737687622e-05,
"loss": 0.7131,
"step": 321
},
{
"epoch": 0.28470380194518125,
"grad_norm": 0.30524560809135437,
"learning_rate": 1.9813120184924504e-05,
"loss": 0.7295,
"step": 322
},
{
"epoch": 0.28558797524314766,
"grad_norm": 0.31186625361442566,
"learning_rate": 1.9811767802255515e-05,
"loss": 0.7212,
"step": 323
},
{
"epoch": 0.2864721485411141,
"grad_norm": 0.3329918682575226,
"learning_rate": 1.981041059034628e-05,
"loss": 0.7414,
"step": 324
},
{
"epoch": 0.28735632183908044,
"grad_norm": 0.31521108746528625,
"learning_rate": 1.9809048549864808e-05,
"loss": 0.7462,
"step": 325
},
{
"epoch": 0.28824049513704686,
"grad_norm": 0.311262845993042,
"learning_rate": 1.9807681681481477e-05,
"loss": 0.7359,
"step": 326
},
{
"epoch": 0.2891246684350133,
"grad_norm": 0.3252621293067932,
"learning_rate": 1.980630998586905e-05,
"loss": 0.7154,
"step": 327
},
{
"epoch": 0.29000884173297964,
"grad_norm": 0.28999894857406616,
"learning_rate": 1.980493346370265e-05,
"loss": 0.7385,
"step": 328
},
{
"epoch": 0.29089301503094606,
"grad_norm": 0.3143526017665863,
"learning_rate": 1.9803552115659787e-05,
"loss": 0.7347,
"step": 329
},
{
"epoch": 0.2917771883289125,
"grad_norm": 0.3156179189682007,
"learning_rate": 1.9802165942420352e-05,
"loss": 0.747,
"step": 330
},
{
"epoch": 0.2926613616268789,
"grad_norm": 0.3086904287338257,
"learning_rate": 1.9800774944666597e-05,
"loss": 0.7356,
"step": 331
},
{
"epoch": 0.29354553492484525,
"grad_norm": 0.29627615213394165,
"learning_rate": 1.9799379123083162e-05,
"loss": 0.757,
"step": 332
},
{
"epoch": 0.29442970822281167,
"grad_norm": 0.30289310216903687,
"learning_rate": 1.9797978478357048e-05,
"loss": 0.7249,
"step": 333
},
{
"epoch": 0.2953138815207781,
"grad_norm": 0.3012111783027649,
"learning_rate": 1.979657301117764e-05,
"loss": 0.7118,
"step": 334
},
{
"epoch": 0.29619805481874445,
"grad_norm": 0.2936379611492157,
"learning_rate": 1.979516272223669e-05,
"loss": 0.7247,
"step": 335
},
{
"epoch": 0.29708222811671087,
"grad_norm": 0.306845486164093,
"learning_rate": 1.9793747612228328e-05,
"loss": 0.7534,
"step": 336
},
{
"epoch": 0.2979664014146773,
"grad_norm": 0.31245139241218567,
"learning_rate": 1.979232768184905e-05,
"loss": 0.7298,
"step": 337
},
{
"epoch": 0.2988505747126437,
"grad_norm": 0.28984421491622925,
"learning_rate": 1.9790902931797743e-05,
"loss": 0.7131,
"step": 338
},
{
"epoch": 0.29973474801061006,
"grad_norm": 0.2943040132522583,
"learning_rate": 1.978947336277564e-05,
"loss": 0.7345,
"step": 339
},
{
"epoch": 0.3006189213085765,
"grad_norm": 0.31156396865844727,
"learning_rate": 1.9788038975486363e-05,
"loss": 0.7538,
"step": 340
},
{
"epoch": 0.3015030946065429,
"grad_norm": 0.2971622049808502,
"learning_rate": 1.97865997706359e-05,
"loss": 0.7532,
"step": 341
},
{
"epoch": 0.30238726790450926,
"grad_norm": 0.31644076108932495,
"learning_rate": 1.9785155748932615e-05,
"loss": 0.7221,
"step": 342
},
{
"epoch": 0.3032714412024757,
"grad_norm": 0.3032969832420349,
"learning_rate": 1.978370691108723e-05,
"loss": 0.7075,
"step": 343
},
{
"epoch": 0.3041556145004421,
"grad_norm": 0.31852713227272034,
"learning_rate": 1.9782253257812855e-05,
"loss": 0.7373,
"step": 344
},
{
"epoch": 0.3050397877984085,
"grad_norm": 0.29853355884552,
"learning_rate": 1.978079478982496e-05,
"loss": 0.717,
"step": 345
},
{
"epoch": 0.3059239610963749,
"grad_norm": 0.34089395403862,
"learning_rate": 1.9779331507841385e-05,
"loss": 0.7614,
"step": 346
},
{
"epoch": 0.3068081343943413,
"grad_norm": 0.31264424324035645,
"learning_rate": 1.977786341258234e-05,
"loss": 0.7257,
"step": 347
},
{
"epoch": 0.3076923076923077,
"grad_norm": 0.2974138557910919,
"learning_rate": 1.9776390504770405e-05,
"loss": 0.7081,
"step": 348
},
{
"epoch": 0.30857648099027407,
"grad_norm": 0.32469305396080017,
"learning_rate": 1.9774912785130535e-05,
"loss": 0.7566,
"step": 349
},
{
"epoch": 0.3094606542882405,
"grad_norm": 0.3054787516593933,
"learning_rate": 1.9773430254390033e-05,
"loss": 0.7155,
"step": 350
},
{
"epoch": 0.3103448275862069,
"grad_norm": 0.2864859700202942,
"learning_rate": 1.9771942913278596e-05,
"loss": 0.7021,
"step": 351
},
{
"epoch": 0.3112290008841733,
"grad_norm": 0.3371618688106537,
"learning_rate": 1.977045076252827e-05,
"loss": 0.7576,
"step": 352
},
{
"epoch": 0.3121131741821397,
"grad_norm": 0.29646798968315125,
"learning_rate": 1.976895380287348e-05,
"loss": 0.7146,
"step": 353
},
{
"epoch": 0.3129973474801061,
"grad_norm": 0.3251480162143707,
"learning_rate": 1.9767452035051e-05,
"loss": 0.7321,
"step": 354
},
{
"epoch": 0.3138815207780725,
"grad_norm": 0.3068424165248871,
"learning_rate": 1.9765945459800004e-05,
"loss": 0.7243,
"step": 355
},
{
"epoch": 0.3147656940760389,
"grad_norm": 0.31935080885887146,
"learning_rate": 1.9764434077861992e-05,
"loss": 0.7144,
"step": 356
},
{
"epoch": 0.3156498673740053,
"grad_norm": 0.30527573823928833,
"learning_rate": 1.976291788998086e-05,
"loss": 0.7327,
"step": 357
},
{
"epoch": 0.3165340406719717,
"grad_norm": 0.29151904582977295,
"learning_rate": 1.9761396896902848e-05,
"loss": 0.7371,
"step": 358
},
{
"epoch": 0.31741821396993813,
"grad_norm": 0.3098064363002777,
"learning_rate": 1.9759871099376583e-05,
"loss": 0.7437,
"step": 359
},
{
"epoch": 0.3183023872679045,
"grad_norm": 0.31516727805137634,
"learning_rate": 1.975834049815304e-05,
"loss": 0.7361,
"step": 360
},
{
"epoch": 0.3191865605658709,
"grad_norm": 0.31510454416275024,
"learning_rate": 1.9756805093985564e-05,
"loss": 0.7137,
"step": 361
},
{
"epoch": 0.32007073386383733,
"grad_norm": 0.3104992210865021,
"learning_rate": 1.9755264887629863e-05,
"loss": 0.7152,
"step": 362
},
{
"epoch": 0.3209549071618037,
"grad_norm": 0.33225691318511963,
"learning_rate": 1.9753719879844007e-05,
"loss": 0.7562,
"step": 363
},
{
"epoch": 0.3218390804597701,
"grad_norm": 0.31872305274009705,
"learning_rate": 1.9752170071388433e-05,
"loss": 0.7218,
"step": 364
},
{
"epoch": 0.32272325375773653,
"grad_norm": 0.32427480816841125,
"learning_rate": 1.975061546302594e-05,
"loss": 0.7395,
"step": 365
},
{
"epoch": 0.32360742705570295,
"grad_norm": 0.3218376338481903,
"learning_rate": 1.974905605552169e-05,
"loss": 0.7365,
"step": 366
},
{
"epoch": 0.3244916003536693,
"grad_norm": 0.3075506389141083,
"learning_rate": 1.97474918496432e-05,
"loss": 0.7411,
"step": 367
},
{
"epoch": 0.3253757736516357,
"grad_norm": 0.3121081590652466,
"learning_rate": 1.974592284616036e-05,
"loss": 0.7196,
"step": 368
},
{
"epoch": 0.32625994694960214,
"grad_norm": 0.29194772243499756,
"learning_rate": 1.974434904584541e-05,
"loss": 0.6967,
"step": 369
},
{
"epoch": 0.3271441202475685,
"grad_norm": 0.312102735042572,
"learning_rate": 1.9742770449472966e-05,
"loss": 0.7163,
"step": 370
},
{
"epoch": 0.3280282935455349,
"grad_norm": 0.3106652796268463,
"learning_rate": 1.9741187057819985e-05,
"loss": 0.7425,
"step": 371
},
{
"epoch": 0.32891246684350134,
"grad_norm": 0.29172608256340027,
"learning_rate": 1.9739598871665798e-05,
"loss": 0.7069,
"step": 372
},
{
"epoch": 0.3297966401414677,
"grad_norm": 0.2911004424095154,
"learning_rate": 1.9738005891792093e-05,
"loss": 0.7056,
"step": 373
},
{
"epoch": 0.3306808134394341,
"grad_norm": 0.697097659111023,
"learning_rate": 1.9736408118982916e-05,
"loss": 0.7305,
"step": 374
},
{
"epoch": 0.33156498673740054,
"grad_norm": 0.2944968044757843,
"learning_rate": 1.9734805554024674e-05,
"loss": 0.734,
"step": 375
},
{
"epoch": 0.33244916003536695,
"grad_norm": 0.29519593715667725,
"learning_rate": 1.973319819770613e-05,
"loss": 0.7162,
"step": 376
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.30538448691368103,
"learning_rate": 1.9731586050818407e-05,
"loss": 0.7335,
"step": 377
},
{
"epoch": 0.33421750663129973,
"grad_norm": 0.3083533048629761,
"learning_rate": 1.972996911415498e-05,
"loss": 0.7326,
"step": 378
},
{
"epoch": 0.33510167992926615,
"grad_norm": 0.28719231486320496,
"learning_rate": 1.9728347388511693e-05,
"loss": 0.735,
"step": 379
},
{
"epoch": 0.3359858532272325,
"grad_norm": 0.3111599087715149,
"learning_rate": 1.972672087468674e-05,
"loss": 0.7376,
"step": 380
},
{
"epoch": 0.33687002652519893,
"grad_norm": 0.3131982386112213,
"learning_rate": 1.972508957348067e-05,
"loss": 0.7078,
"step": 381
},
{
"epoch": 0.33775419982316535,
"grad_norm": 0.29568973183631897,
"learning_rate": 1.9723453485696396e-05,
"loss": 0.7608,
"step": 382
},
{
"epoch": 0.33863837312113176,
"grad_norm": 0.3348521888256073,
"learning_rate": 1.972181261213918e-05,
"loss": 0.7409,
"step": 383
},
{
"epoch": 0.3395225464190981,
"grad_norm": 0.8223048448562622,
"learning_rate": 1.9720166953616633e-05,
"loss": 0.7124,
"step": 384
},
{
"epoch": 0.34040671971706454,
"grad_norm": 0.30228742957115173,
"learning_rate": 1.971851651093874e-05,
"loss": 0.7145,
"step": 385
},
{
"epoch": 0.34129089301503096,
"grad_norm": 0.3235529065132141,
"learning_rate": 1.9716861284917822e-05,
"loss": 0.7631,
"step": 386
},
{
"epoch": 0.3421750663129973,
"grad_norm": 0.3403104245662689,
"learning_rate": 1.971520127636857e-05,
"loss": 0.7221,
"step": 387
},
{
"epoch": 0.34305923961096374,
"grad_norm": 0.31708213686943054,
"learning_rate": 1.971353648610802e-05,
"loss": 0.718,
"step": 388
},
{
"epoch": 0.34394341290893016,
"grad_norm": 0.34162816405296326,
"learning_rate": 1.9711866914955558e-05,
"loss": 0.725,
"step": 389
},
{
"epoch": 0.3448275862068966,
"grad_norm": 0.3519470989704132,
"learning_rate": 1.971019256373293e-05,
"loss": 0.7218,
"step": 390
},
{
"epoch": 0.34571175950486294,
"grad_norm": 0.3420668840408325,
"learning_rate": 1.9708513433264232e-05,
"loss": 0.7235,
"step": 391
},
{
"epoch": 0.34659593280282935,
"grad_norm": 0.3246281147003174,
"learning_rate": 1.9706829524375917e-05,
"loss": 0.7393,
"step": 392
},
{
"epoch": 0.34748010610079577,
"grad_norm": 0.30855509638786316,
"learning_rate": 1.970514083789678e-05,
"loss": 0.714,
"step": 393
},
{
"epoch": 0.34836427939876213,
"grad_norm": 0.30177581310272217,
"learning_rate": 1.9703447374657977e-05,
"loss": 0.7195,
"step": 394
},
{
"epoch": 0.34924845269672855,
"grad_norm": 0.3111273944377899,
"learning_rate": 1.970174913549301e-05,
"loss": 0.7248,
"step": 395
},
{
"epoch": 0.35013262599469497,
"grad_norm": 0.2822318375110626,
"learning_rate": 1.970004612123773e-05,
"loss": 0.6988,
"step": 396
},
{
"epoch": 0.3510167992926614,
"grad_norm": 0.3094327449798584,
"learning_rate": 1.9698338332730345e-05,
"loss": 0.6981,
"step": 397
},
{
"epoch": 0.35190097259062775,
"grad_norm": 0.30267050862312317,
"learning_rate": 1.9696625770811412e-05,
"loss": 0.7472,
"step": 398
},
{
"epoch": 0.35278514588859416,
"grad_norm": 0.3122124671936035,
"learning_rate": 1.969490843632383e-05,
"loss": 0.7003,
"step": 399
},
{
"epoch": 0.3536693191865606,
"grad_norm": 0.3039180338382721,
"learning_rate": 1.969318633011285e-05,
"loss": 0.7406,
"step": 400
},
{
"epoch": 0.35455349248452694,
"grad_norm": 0.3002283275127411,
"learning_rate": 1.9691459453026076e-05,
"loss": 0.7077,
"step": 401
},
{
"epoch": 0.35543766578249336,
"grad_norm": 0.2950460910797119,
"learning_rate": 1.9689727805913458e-05,
"loss": 0.7108,
"step": 402
},
{
"epoch": 0.3563218390804598,
"grad_norm": 0.30414167046546936,
"learning_rate": 1.968799138962729e-05,
"loss": 0.7418,
"step": 403
},
{
"epoch": 0.3572060123784262,
"grad_norm": 0.29464274644851685,
"learning_rate": 1.9686250205022216e-05,
"loss": 0.7412,
"step": 404
},
{
"epoch": 0.35809018567639256,
"grad_norm": 0.2845623195171356,
"learning_rate": 1.968450425295523e-05,
"loss": 0.6918,
"step": 405
},
{
"epoch": 0.358974358974359,
"grad_norm": 0.3196525275707245,
"learning_rate": 1.968275353428567e-05,
"loss": 0.7268,
"step": 406
},
{
"epoch": 0.3598585322723254,
"grad_norm": 0.31612008810043335,
"learning_rate": 1.968099804987522e-05,
"loss": 0.7152,
"step": 407
},
{
"epoch": 0.36074270557029176,
"grad_norm": 0.3174068033695221,
"learning_rate": 1.9679237800587906e-05,
"loss": 0.7225,
"step": 408
},
{
"epoch": 0.3616268788682582,
"grad_norm": 0.3130561411380768,
"learning_rate": 1.9677472787290103e-05,
"loss": 0.7178,
"step": 409
},
{
"epoch": 0.3625110521662246,
"grad_norm": 0.3186300992965698,
"learning_rate": 1.9675703010850533e-05,
"loss": 0.6892,
"step": 410
},
{
"epoch": 0.363395225464191,
"grad_norm": 0.3219105899333954,
"learning_rate": 1.967392847214026e-05,
"loss": 0.7222,
"step": 411
},
{
"epoch": 0.36427939876215737,
"grad_norm": 0.3322054147720337,
"learning_rate": 1.9672149172032688e-05,
"loss": 0.7194,
"step": 412
},
{
"epoch": 0.3651635720601238,
"grad_norm": 0.30562153458595276,
"learning_rate": 1.967036511140357e-05,
"loss": 0.7107,
"step": 413
},
{
"epoch": 0.3660477453580902,
"grad_norm": 0.3107975125312805,
"learning_rate": 1.9668576291131005e-05,
"loss": 0.7247,
"step": 414
},
{
"epoch": 0.36693191865605657,
"grad_norm": 0.3102024793624878,
"learning_rate": 1.9666782712095422e-05,
"loss": 0.7259,
"step": 415
},
{
"epoch": 0.367816091954023,
"grad_norm": 0.28330785036087036,
"learning_rate": 1.9664984375179605e-05,
"loss": 0.691,
"step": 416
},
{
"epoch": 0.3687002652519894,
"grad_norm": 0.33220580220222473,
"learning_rate": 1.9663181281268674e-05,
"loss": 0.7287,
"step": 417
},
{
"epoch": 0.3695844385499558,
"grad_norm": 0.29663577675819397,
"learning_rate": 1.9661373431250092e-05,
"loss": 0.6976,
"step": 418
},
{
"epoch": 0.3704686118479222,
"grad_norm": 0.33423173427581787,
"learning_rate": 1.9659560826013663e-05,
"loss": 0.7425,
"step": 419
},
{
"epoch": 0.3713527851458886,
"grad_norm": 0.29327669739723206,
"learning_rate": 1.9657743466451524e-05,
"loss": 0.7248,
"step": 420
},
{
"epoch": 0.372236958443855,
"grad_norm": 0.34733760356903076,
"learning_rate": 1.965592135345817e-05,
"loss": 0.7483,
"step": 421
},
{
"epoch": 0.3731211317418214,
"grad_norm": 0.3145272731781006,
"learning_rate": 1.965409448793041e-05,
"loss": 0.7019,
"step": 422
},
{
"epoch": 0.3740053050397878,
"grad_norm": 0.31887269020080566,
"learning_rate": 1.965226287076742e-05,
"loss": 0.7297,
"step": 423
},
{
"epoch": 0.3748894783377542,
"grad_norm": 0.3094770014286041,
"learning_rate": 1.9650426502870693e-05,
"loss": 0.6995,
"step": 424
},
{
"epoch": 0.3757736516357206,
"grad_norm": 0.2937994599342346,
"learning_rate": 1.964858538514407e-05,
"loss": 0.7042,
"step": 425
},
{
"epoch": 0.376657824933687,
"grad_norm": 0.30503976345062256,
"learning_rate": 1.9646739518493732e-05,
"loss": 0.7312,
"step": 426
},
{
"epoch": 0.3775419982316534,
"grad_norm": 0.30092528462409973,
"learning_rate": 1.964488890382819e-05,
"loss": 0.7251,
"step": 427
},
{
"epoch": 0.3784261715296198,
"grad_norm": 0.2824142873287201,
"learning_rate": 1.9643033542058293e-05,
"loss": 0.7158,
"step": 428
},
{
"epoch": 0.3793103448275862,
"grad_norm": 0.33134981989860535,
"learning_rate": 1.9641173434097234e-05,
"loss": 0.7071,
"step": 429
},
{
"epoch": 0.3801945181255526,
"grad_norm": 0.307426393032074,
"learning_rate": 1.9639308580860533e-05,
"loss": 0.7038,
"step": 430
},
{
"epoch": 0.381078691423519,
"grad_norm": 0.28841471672058105,
"learning_rate": 1.9637438983266054e-05,
"loss": 0.6905,
"step": 431
},
{
"epoch": 0.3819628647214854,
"grad_norm": 0.32645469903945923,
"learning_rate": 1.9635564642233988e-05,
"loss": 0.7469,
"step": 432
},
{
"epoch": 0.3828470380194518,
"grad_norm": 0.33705416321754456,
"learning_rate": 1.9633685558686862e-05,
"loss": 0.7389,
"step": 433
},
{
"epoch": 0.3837312113174182,
"grad_norm": 0.28802844882011414,
"learning_rate": 1.9631801733549543e-05,
"loss": 0.7051,
"step": 434
},
{
"epoch": 0.38461538461538464,
"grad_norm": 0.3462058901786804,
"learning_rate": 1.962991316774923e-05,
"loss": 0.7286,
"step": 435
},
{
"epoch": 0.385499557913351,
"grad_norm": 0.33302977681159973,
"learning_rate": 1.9628019862215448e-05,
"loss": 0.7058,
"step": 436
},
{
"epoch": 0.3863837312113174,
"grad_norm": 0.3011861741542816,
"learning_rate": 1.9626121817880068e-05,
"loss": 0.74,
"step": 437
},
{
"epoch": 0.38726790450928383,
"grad_norm": 0.34865158796310425,
"learning_rate": 1.9624219035677278e-05,
"loss": 0.7261,
"step": 438
},
{
"epoch": 0.3881520778072502,
"grad_norm": 0.28322339057922363,
"learning_rate": 1.9622311516543612e-05,
"loss": 0.6971,
"step": 439
},
{
"epoch": 0.3890362511052166,
"grad_norm": 0.32249921560287476,
"learning_rate": 1.9620399261417925e-05,
"loss": 0.7246,
"step": 440
},
{
"epoch": 0.38992042440318303,
"grad_norm": 0.3132973909378052,
"learning_rate": 1.961848227124141e-05,
"loss": 0.7202,
"step": 441
},
{
"epoch": 0.39080459770114945,
"grad_norm": 0.31046420335769653,
"learning_rate": 1.9616560546957585e-05,
"loss": 0.7094,
"step": 442
},
{
"epoch": 0.3916887709991158,
"grad_norm": 0.303803414106369,
"learning_rate": 1.96146340895123e-05,
"loss": 0.7001,
"step": 443
},
{
"epoch": 0.3925729442970822,
"grad_norm": 0.3086932599544525,
"learning_rate": 1.9612702899853744e-05,
"loss": 0.7135,
"step": 444
},
{
"epoch": 0.39345711759504864,
"grad_norm": 0.2989056408405304,
"learning_rate": 1.9610766978932415e-05,
"loss": 0.6945,
"step": 445
},
{
"epoch": 0.394341290893015,
"grad_norm": 0.3398056924343109,
"learning_rate": 1.9608826327701156e-05,
"loss": 0.7123,
"step": 446
},
{
"epoch": 0.3952254641909814,
"grad_norm": 0.28489160537719727,
"learning_rate": 1.9606880947115134e-05,
"loss": 0.6935,
"step": 447
},
{
"epoch": 0.39610963748894784,
"grad_norm": 0.32360243797302246,
"learning_rate": 1.9604930838131845e-05,
"loss": 0.7254,
"step": 448
},
{
"epoch": 0.39699381078691426,
"grad_norm": 0.3027465045452118,
"learning_rate": 1.9602976001711104e-05,
"loss": 0.7225,
"step": 449
},
{
"epoch": 0.3978779840848806,
"grad_norm": 0.3090364933013916,
"learning_rate": 1.960101643881507e-05,
"loss": 0.7254,
"step": 450
},
{
"epoch": 0.39876215738284704,
"grad_norm": 0.3089822232723236,
"learning_rate": 1.9599052150408205e-05,
"loss": 0.7407,
"step": 451
},
{
"epoch": 0.39964633068081346,
"grad_norm": 0.29422715306282043,
"learning_rate": 1.9597083137457316e-05,
"loss": 0.736,
"step": 452
},
{
"epoch": 0.4005305039787798,
"grad_norm": 0.3101896047592163,
"learning_rate": 1.9595109400931525e-05,
"loss": 0.7077,
"step": 453
},
{
"epoch": 0.40141467727674623,
"grad_norm": 0.2899080216884613,
"learning_rate": 1.9593130941802293e-05,
"loss": 0.7031,
"step": 454
},
{
"epoch": 0.40229885057471265,
"grad_norm": 0.32698938250541687,
"learning_rate": 1.9591147761043384e-05,
"loss": 0.7346,
"step": 455
},
{
"epoch": 0.40318302387267907,
"grad_norm": 0.2984412908554077,
"learning_rate": 1.95891598596309e-05,
"loss": 0.7278,
"step": 456
},
{
"epoch": 0.40406719717064543,
"grad_norm": 0.3102678656578064,
"learning_rate": 1.9587167238543263e-05,
"loss": 0.7168,
"step": 457
},
{
"epoch": 0.40495137046861185,
"grad_norm": 0.3104475736618042,
"learning_rate": 1.958516989876122e-05,
"loss": 0.7223,
"step": 458
},
{
"epoch": 0.40583554376657827,
"grad_norm": 0.29723411798477173,
"learning_rate": 1.958316784126784e-05,
"loss": 0.7088,
"step": 459
},
{
"epoch": 0.40671971706454463,
"grad_norm": 0.294161319732666,
"learning_rate": 1.958116106704851e-05,
"loss": 0.6976,
"step": 460
},
{
"epoch": 0.40760389036251105,
"grad_norm": 0.3195638060569763,
"learning_rate": 1.9579149577090944e-05,
"loss": 0.7385,
"step": 461
},
{
"epoch": 0.40848806366047746,
"grad_norm": 0.314506858587265,
"learning_rate": 1.957713337238517e-05,
"loss": 0.6914,
"step": 462
},
{
"epoch": 0.4093722369584439,
"grad_norm": 0.32924503087997437,
"learning_rate": 1.9575112453923545e-05,
"loss": 0.7194,
"step": 463
},
{
"epoch": 0.41025641025641024,
"grad_norm": 0.2915407717227936,
"learning_rate": 1.9573086822700742e-05,
"loss": 0.7187,
"step": 464
},
{
"epoch": 0.41114058355437666,
"grad_norm": 0.31798258423805237,
"learning_rate": 1.957105647971375e-05,
"loss": 0.7142,
"step": 465
},
{
"epoch": 0.4120247568523431,
"grad_norm": 0.2982095777988434,
"learning_rate": 1.9569021425961888e-05,
"loss": 0.7263,
"step": 466
},
{
"epoch": 0.41290893015030944,
"grad_norm": 0.31103476881980896,
"learning_rate": 1.9566981662446774e-05,
"loss": 0.6761,
"step": 467
},
{
"epoch": 0.41379310344827586,
"grad_norm": 0.34996750950813293,
"learning_rate": 1.9564937190172372e-05,
"loss": 0.7145,
"step": 468
},
{
"epoch": 0.4146772767462423,
"grad_norm": 0.29701176285743713,
"learning_rate": 1.9562888010144933e-05,
"loss": 0.7151,
"step": 469
},
{
"epoch": 0.4155614500442087,
"grad_norm": 0.31011074781417847,
"learning_rate": 1.956083412337305e-05,
"loss": 0.7204,
"step": 470
},
{
"epoch": 0.41644562334217505,
"grad_norm": 0.28999054431915283,
"learning_rate": 1.9558775530867618e-05,
"loss": 0.7268,
"step": 471
},
{
"epoch": 0.41732979664014147,
"grad_norm": 0.3249395787715912,
"learning_rate": 1.9556712233641856e-05,
"loss": 0.7396,
"step": 472
},
{
"epoch": 0.4182139699381079,
"grad_norm": 0.31111451983451843,
"learning_rate": 1.9554644232711292e-05,
"loss": 0.7461,
"step": 473
},
{
"epoch": 0.41909814323607425,
"grad_norm": 0.31386739015579224,
"learning_rate": 1.9552571529093774e-05,
"loss": 0.7352,
"step": 474
},
{
"epoch": 0.41998231653404067,
"grad_norm": 0.33240604400634766,
"learning_rate": 1.9550494123809463e-05,
"loss": 0.7279,
"step": 475
},
{
"epoch": 0.4208664898320071,
"grad_norm": 0.28963205218315125,
"learning_rate": 1.9548412017880835e-05,
"loss": 0.7146,
"step": 476
},
{
"epoch": 0.4217506631299735,
"grad_norm": 0.3098430931568146,
"learning_rate": 1.954632521233268e-05,
"loss": 0.7328,
"step": 477
},
{
"epoch": 0.42263483642793986,
"grad_norm": 0.29075995087623596,
"learning_rate": 1.9544233708192096e-05,
"loss": 0.6952,
"step": 478
},
{
"epoch": 0.4235190097259063,
"grad_norm": 0.29583442211151123,
"learning_rate": 1.9542137506488496e-05,
"loss": 0.7083,
"step": 479
},
{
"epoch": 0.4244031830238727,
"grad_norm": 0.31121641397476196,
"learning_rate": 1.9540036608253615e-05,
"loss": 0.7123,
"step": 480
},
{
"epoch": 0.42528735632183906,
"grad_norm": 0.28241464495658875,
"learning_rate": 1.953793101452148e-05,
"loss": 0.717,
"step": 481
},
{
"epoch": 0.4261715296198055,
"grad_norm": 0.3004770874977112,
"learning_rate": 1.9535820726328457e-05,
"loss": 0.7061,
"step": 482
},
{
"epoch": 0.4270557029177719,
"grad_norm": 0.2910979390144348,
"learning_rate": 1.9533705744713186e-05,
"loss": 0.7286,
"step": 483
},
{
"epoch": 0.42793987621573826,
"grad_norm": 0.2879573106765747,
"learning_rate": 1.953158607071665e-05,
"loss": 0.7087,
"step": 484
},
{
"epoch": 0.4288240495137047,
"grad_norm": 0.29907339811325073,
"learning_rate": 1.9529461705382123e-05,
"loss": 0.7303,
"step": 485
},
{
"epoch": 0.4297082228116711,
"grad_norm": 0.3076381981372833,
"learning_rate": 1.95273326497552e-05,
"loss": 0.6827,
"step": 486
},
{
"epoch": 0.4305923961096375,
"grad_norm": 0.28718915581703186,
"learning_rate": 1.9525198904883765e-05,
"loss": 0.6988,
"step": 487
},
{
"epoch": 0.43147656940760387,
"grad_norm": 0.2863910496234894,
"learning_rate": 1.9523060471818033e-05,
"loss": 0.7419,
"step": 488
},
{
"epoch": 0.4323607427055703,
"grad_norm": 0.2820169925689697,
"learning_rate": 1.9520917351610516e-05,
"loss": 0.6944,
"step": 489
},
{
"epoch": 0.4332449160035367,
"grad_norm": 0.29458898305892944,
"learning_rate": 1.951876954531603e-05,
"loss": 0.7181,
"step": 490
},
{
"epoch": 0.43412908930150307,
"grad_norm": 0.29644542932510376,
"learning_rate": 1.95166170539917e-05,
"loss": 0.7025,
"step": 491
},
{
"epoch": 0.4350132625994695,
"grad_norm": 0.29215866327285767,
"learning_rate": 1.9514459878696962e-05,
"loss": 0.7378,
"step": 492
},
{
"epoch": 0.4358974358974359,
"grad_norm": 0.28991013765335083,
"learning_rate": 1.9512298020493553e-05,
"loss": 0.7042,
"step": 493
},
{
"epoch": 0.4367816091954023,
"grad_norm": 0.33385714888572693,
"learning_rate": 1.951013148044551e-05,
"loss": 0.6828,
"step": 494
},
{
"epoch": 0.4376657824933687,
"grad_norm": 0.295158714056015,
"learning_rate": 1.9507960259619184e-05,
"loss": 0.6925,
"step": 495
},
{
"epoch": 0.4385499557913351,
"grad_norm": 0.29772451519966125,
"learning_rate": 1.950578435908322e-05,
"loss": 0.7147,
"step": 496
},
{
"epoch": 0.4394341290893015,
"grad_norm": 0.3124242424964905,
"learning_rate": 1.9503603779908577e-05,
"loss": 0.7034,
"step": 497
},
{
"epoch": 0.4403183023872679,
"grad_norm": 0.2779264450073242,
"learning_rate": 1.950141852316851e-05,
"loss": 0.7158,
"step": 498
},
{
"epoch": 0.4412024756852343,
"grad_norm": 0.32939305901527405,
"learning_rate": 1.9499228589938574e-05,
"loss": 0.691,
"step": 499
},
{
"epoch": 0.4420866489832007,
"grad_norm": 0.29040637612342834,
"learning_rate": 1.9497033981296635e-05,
"loss": 0.701,
"step": 500
},
{
"epoch": 0.4420866489832007,
"eval_loss": 0.6279866099357605,
"eval_runtime": 648.3984,
"eval_samples_per_second": 9.303,
"eval_steps_per_second": 1.163,
"step": 500
},
{
"epoch": 0.44297082228116713,
"grad_norm": 0.33221468329429626,
"learning_rate": 1.9494834698322847e-05,
"loss": 0.7305,
"step": 501
},
{
"epoch": 0.4438549955791335,
"grad_norm": 0.3082229793071747,
"learning_rate": 1.949263074209968e-05,
"loss": 0.7262,
"step": 502
},
{
"epoch": 0.4447391688770999,
"grad_norm": 0.3733028471469879,
"learning_rate": 1.9490422113711885e-05,
"loss": 0.7181,
"step": 503
},
{
"epoch": 0.44562334217506633,
"grad_norm": 0.2793416380882263,
"learning_rate": 1.9488208814246537e-05,
"loss": 0.6879,
"step": 504
},
{
"epoch": 0.4465075154730327,
"grad_norm": 0.30525094270706177,
"learning_rate": 1.948599084479299e-05,
"loss": 0.6987,
"step": 505
},
{
"epoch": 0.4473916887709991,
"grad_norm": 0.30651694536209106,
"learning_rate": 1.9483768206442902e-05,
"loss": 0.6959,
"step": 506
},
{
"epoch": 0.4482758620689655,
"grad_norm": 0.30056047439575195,
"learning_rate": 1.948154090029023e-05,
"loss": 0.7108,
"step": 507
},
{
"epoch": 0.44916003536693194,
"grad_norm": 0.3235388696193695,
"learning_rate": 1.9479308927431235e-05,
"loss": 0.7228,
"step": 508
},
{
"epoch": 0.4500442086648983,
"grad_norm": 0.29917433857917786,
"learning_rate": 1.9477072288964464e-05,
"loss": 0.6923,
"step": 509
},
{
"epoch": 0.4509283819628647,
"grad_norm": 0.2985602021217346,
"learning_rate": 1.9474830985990763e-05,
"loss": 0.6973,
"step": 510
},
{
"epoch": 0.45181255526083114,
"grad_norm": 0.33893781900405884,
"learning_rate": 1.9472585019613278e-05,
"loss": 0.7017,
"step": 511
},
{
"epoch": 0.4526967285587975,
"grad_norm": 0.2920505106449127,
"learning_rate": 1.947033439093745e-05,
"loss": 0.717,
"step": 512
},
{
"epoch": 0.4535809018567639,
"grad_norm": 0.3140103220939636,
"learning_rate": 1.946807910107101e-05,
"loss": 0.7065,
"step": 513
},
{
"epoch": 0.45446507515473034,
"grad_norm": 0.2773943543434143,
"learning_rate": 1.9465819151123984e-05,
"loss": 0.6733,
"step": 514
},
{
"epoch": 0.45534924845269675,
"grad_norm": 0.3039877116680145,
"learning_rate": 1.9463554542208702e-05,
"loss": 0.7051,
"step": 515
},
{
"epoch": 0.4562334217506631,
"grad_norm": 0.2873627543449402,
"learning_rate": 1.9461285275439768e-05,
"loss": 0.6719,
"step": 516
},
{
"epoch": 0.45711759504862953,
"grad_norm": 0.2894982695579529,
"learning_rate": 1.94590113519341e-05,
"loss": 0.717,
"step": 517
},
{
"epoch": 0.45800176834659595,
"grad_norm": 0.3159572184085846,
"learning_rate": 1.9456732772810886e-05,
"loss": 0.686,
"step": 518
},
{
"epoch": 0.4588859416445623,
"grad_norm": 0.2958946228027344,
"learning_rate": 1.9454449539191625e-05,
"loss": 0.7326,
"step": 519
},
{
"epoch": 0.45977011494252873,
"grad_norm": 0.28420698642730713,
"learning_rate": 1.9452161652200096e-05,
"loss": 0.7134,
"step": 520
},
{
"epoch": 0.46065428824049515,
"grad_norm": 0.2878085672855377,
"learning_rate": 1.944986911296237e-05,
"loss": 0.6947,
"step": 521
},
{
"epoch": 0.46153846153846156,
"grad_norm": 0.2957565188407898,
"learning_rate": 1.944757192260681e-05,
"loss": 0.706,
"step": 522
},
{
"epoch": 0.4624226348364279,
"grad_norm": 0.29947903752326965,
"learning_rate": 1.9445270082264063e-05,
"loss": 0.6872,
"step": 523
},
{
"epoch": 0.46330680813439434,
"grad_norm": 0.28980112075805664,
"learning_rate": 1.9442963593067074e-05,
"loss": 0.6842,
"step": 524
},
{
"epoch": 0.46419098143236076,
"grad_norm": 0.32962915301322937,
"learning_rate": 1.944065245615107e-05,
"loss": 0.7115,
"step": 525
},
{
"epoch": 0.4650751547303271,
"grad_norm": 0.27733734250068665,
"learning_rate": 1.943833667265356e-05,
"loss": 0.6956,
"step": 526
},
{
"epoch": 0.46595932802829354,
"grad_norm": 0.30438053607940674,
"learning_rate": 1.9436016243714356e-05,
"loss": 0.7128,
"step": 527
},
{
"epoch": 0.46684350132625996,
"grad_norm": 0.28507542610168457,
"learning_rate": 1.943369117047554e-05,
"loss": 0.7085,
"step": 528
},
{
"epoch": 0.4677276746242264,
"grad_norm": 0.2784237265586853,
"learning_rate": 1.943136145408149e-05,
"loss": 0.7075,
"step": 529
},
{
"epoch": 0.46861184792219274,
"grad_norm": 0.2838481366634369,
"learning_rate": 1.942902709567886e-05,
"loss": 0.6866,
"step": 530
},
{
"epoch": 0.46949602122015915,
"grad_norm": 0.2762492299079895,
"learning_rate": 1.9426688096416604e-05,
"loss": 0.703,
"step": 531
},
{
"epoch": 0.47038019451812557,
"grad_norm": 0.27736836671829224,
"learning_rate": 1.9424344457445944e-05,
"loss": 0.6967,
"step": 532
},
{
"epoch": 0.47126436781609193,
"grad_norm": 0.29390519857406616,
"learning_rate": 1.942199617992039e-05,
"loss": 0.6971,
"step": 533
},
{
"epoch": 0.47214854111405835,
"grad_norm": 0.26253819465637207,
"learning_rate": 1.9419643264995742e-05,
"loss": 0.6881,
"step": 534
},
{
"epoch": 0.47303271441202477,
"grad_norm": 0.2891993820667267,
"learning_rate": 1.9417285713830078e-05,
"loss": 0.7154,
"step": 535
},
{
"epoch": 0.4739168877099912,
"grad_norm": 0.2886184751987457,
"learning_rate": 1.9414923527583757e-05,
"loss": 0.6982,
"step": 536
},
{
"epoch": 0.47480106100795755,
"grad_norm": 0.2831715941429138,
"learning_rate": 1.941255670741942e-05,
"loss": 0.73,
"step": 537
},
{
"epoch": 0.47568523430592397,
"grad_norm": 0.38041529059410095,
"learning_rate": 1.941018525450199e-05,
"loss": 0.7059,
"step": 538
},
{
"epoch": 0.4765694076038904,
"grad_norm": 0.284328818321228,
"learning_rate": 1.9407809169998668e-05,
"loss": 0.7011,
"step": 539
},
{
"epoch": 0.47745358090185674,
"grad_norm": 0.3045811355113983,
"learning_rate": 1.9405428455078933e-05,
"loss": 0.7069,
"step": 540
},
{
"epoch": 0.47833775419982316,
"grad_norm": 0.27783140540122986,
"learning_rate": 1.940304311091455e-05,
"loss": 0.6981,
"step": 541
},
{
"epoch": 0.4792219274977896,
"grad_norm": 0.29825741052627563,
"learning_rate": 1.9400653138679558e-05,
"loss": 0.7213,
"step": 542
},
{
"epoch": 0.48010610079575594,
"grad_norm": 0.266789972782135,
"learning_rate": 1.9398258539550272e-05,
"loss": 0.7062,
"step": 543
},
{
"epoch": 0.48099027409372236,
"grad_norm": 0.3040013015270233,
"learning_rate": 1.9395859314705286e-05,
"loss": 0.6771,
"step": 544
},
{
"epoch": 0.4818744473916888,
"grad_norm": 0.2720315456390381,
"learning_rate": 1.939345546532547e-05,
"loss": 0.6829,
"step": 545
},
{
"epoch": 0.4827586206896552,
"grad_norm": 0.3095323443412781,
"learning_rate": 1.939104699259398e-05,
"loss": 0.6937,
"step": 546
},
{
"epoch": 0.48364279398762156,
"grad_norm": 0.30481135845184326,
"learning_rate": 1.938863389769623e-05,
"loss": 0.74,
"step": 547
},
{
"epoch": 0.484526967285588,
"grad_norm": 0.2900422513484955,
"learning_rate": 1.938621618181992e-05,
"loss": 0.7185,
"step": 548
},
{
"epoch": 0.4854111405835544,
"grad_norm": 0.2996307909488678,
"learning_rate": 1.938379384615502e-05,
"loss": 0.7154,
"step": 549
},
{
"epoch": 0.48629531388152075,
"grad_norm": 0.2834547758102417,
"learning_rate": 1.938136689189378e-05,
"loss": 0.6881,
"step": 550
},
{
"epoch": 0.48717948717948717,
"grad_norm": 0.3083645701408386,
"learning_rate": 1.9378935320230717e-05,
"loss": 0.6856,
"step": 551
},
{
"epoch": 0.4880636604774536,
"grad_norm": 0.2864820659160614,
"learning_rate": 1.9376499132362624e-05,
"loss": 0.7036,
"step": 552
},
{
"epoch": 0.48894783377542,
"grad_norm": 0.2748136818408966,
"learning_rate": 1.9374058329488562e-05,
"loss": 0.6709,
"step": 553
},
{
"epoch": 0.48983200707338637,
"grad_norm": 0.2682119309902191,
"learning_rate": 1.9371612912809872e-05,
"loss": 0.6905,
"step": 554
},
{
"epoch": 0.4907161803713528,
"grad_norm": 0.28333592414855957,
"learning_rate": 1.9369162883530154e-05,
"loss": 0.6686,
"step": 555
},
{
"epoch": 0.4916003536693192,
"grad_norm": 0.2658959925174713,
"learning_rate": 1.936670824285529e-05,
"loss": 0.7025,
"step": 556
},
{
"epoch": 0.49248452696728556,
"grad_norm": 0.2823108434677124,
"learning_rate": 1.9364248991993422e-05,
"loss": 0.7039,
"step": 557
},
{
"epoch": 0.493368700265252,
"grad_norm": 0.28012388944625854,
"learning_rate": 1.9361785132154965e-05,
"loss": 0.7234,
"step": 558
},
{
"epoch": 0.4942528735632184,
"grad_norm": 0.2878527045249939,
"learning_rate": 1.9359316664552607e-05,
"loss": 0.7002,
"step": 559
},
{
"epoch": 0.4951370468611848,
"grad_norm": 0.29530948400497437,
"learning_rate": 1.9356843590401295e-05,
"loss": 0.7033,
"step": 560
},
{
"epoch": 0.4960212201591512,
"grad_norm": 0.27332285046577454,
"learning_rate": 1.9354365910918252e-05,
"loss": 0.7104,
"step": 561
},
{
"epoch": 0.4969053934571176,
"grad_norm": 0.30689048767089844,
"learning_rate": 1.9351883627322962e-05,
"loss": 0.6899,
"step": 562
},
{
"epoch": 0.497789566755084,
"grad_norm": 0.29162970185279846,
"learning_rate": 1.9349396740837175e-05,
"loss": 0.7139,
"step": 563
},
{
"epoch": 0.4986737400530504,
"grad_norm": 0.3059779107570648,
"learning_rate": 1.934690525268491e-05,
"loss": 0.6802,
"step": 564
},
{
"epoch": 0.4995579133510168,
"grad_norm": 0.27568885684013367,
"learning_rate": 1.9344409164092446e-05,
"loss": 0.6956,
"step": 565
},
{
"epoch": 0.5004420866489832,
"grad_norm": 0.31137195229530334,
"learning_rate": 1.934190847628833e-05,
"loss": 0.7047,
"step": 566
},
{
"epoch": 0.5013262599469496,
"grad_norm": 0.29445040225982666,
"learning_rate": 1.933940319050338e-05,
"loss": 0.7056,
"step": 567
},
{
"epoch": 0.502210433244916,
"grad_norm": 0.32383260130882263,
"learning_rate": 1.933689330797066e-05,
"loss": 0.6759,
"step": 568
},
{
"epoch": 0.5030946065428824,
"grad_norm": 0.3098364770412445,
"learning_rate": 1.933437882992551e-05,
"loss": 0.714,
"step": 569
},
{
"epoch": 0.5039787798408488,
"grad_norm": 0.3067183792591095,
"learning_rate": 1.9331859757605524e-05,
"loss": 0.6885,
"step": 570
},
{
"epoch": 0.5048629531388152,
"grad_norm": 0.3190740644931793,
"learning_rate": 1.932933609225056e-05,
"loss": 0.6922,
"step": 571
},
{
"epoch": 0.5057471264367817,
"grad_norm": 0.28036412596702576,
"learning_rate": 1.9326807835102744e-05,
"loss": 0.6725,
"step": 572
},
{
"epoch": 0.506631299734748,
"grad_norm": 0.3032734990119934,
"learning_rate": 1.9324274987406452e-05,
"loss": 0.7353,
"step": 573
},
{
"epoch": 0.5075154730327144,
"grad_norm": 0.28752991557121277,
"learning_rate": 1.932173755040832e-05,
"loss": 0.7111,
"step": 574
},
{
"epoch": 0.5083996463306808,
"grad_norm": 0.29090917110443115,
"learning_rate": 1.9319195525357247e-05,
"loss": 0.7142,
"step": 575
},
{
"epoch": 0.5092838196286472,
"grad_norm": 0.3057839572429657,
"learning_rate": 1.931664891350439e-05,
"loss": 0.6988,
"step": 576
},
{
"epoch": 0.5101679929266136,
"grad_norm": 0.28443601727485657,
"learning_rate": 1.9314097716103162e-05,
"loss": 0.694,
"step": 577
},
{
"epoch": 0.51105216622458,
"grad_norm": 0.3303302526473999,
"learning_rate": 1.931154193440923e-05,
"loss": 0.6722,
"step": 578
},
{
"epoch": 0.5119363395225465,
"grad_norm": 0.32292038202285767,
"learning_rate": 1.9308981569680525e-05,
"loss": 0.7161,
"step": 579
},
{
"epoch": 0.5128205128205128,
"grad_norm": 0.28932854533195496,
"learning_rate": 1.9306416623177233e-05,
"loss": 0.7051,
"step": 580
},
{
"epoch": 0.5137046861184792,
"grad_norm": 0.3282896876335144,
"learning_rate": 1.930384709616178e-05,
"loss": 0.6936,
"step": 581
},
{
"epoch": 0.5145888594164456,
"grad_norm": 0.29452642798423767,
"learning_rate": 1.9301272989898865e-05,
"loss": 0.7283,
"step": 582
},
{
"epoch": 0.515473032714412,
"grad_norm": 0.30331817269325256,
"learning_rate": 1.9298694305655433e-05,
"loss": 0.7272,
"step": 583
},
{
"epoch": 0.5163572060123784,
"grad_norm": 0.31022608280181885,
"learning_rate": 1.9296111044700683e-05,
"loss": 0.7053,
"step": 584
},
{
"epoch": 0.5172413793103449,
"grad_norm": 0.29611602425575256,
"learning_rate": 1.929352320830607e-05,
"loss": 0.6648,
"step": 585
},
{
"epoch": 0.5181255526083113,
"grad_norm": 0.3030484914779663,
"learning_rate": 1.929093079774529e-05,
"loss": 0.6881,
"step": 586
},
{
"epoch": 0.5190097259062776,
"grad_norm": 0.2776247262954712,
"learning_rate": 1.9288333814294304e-05,
"loss": 0.6932,
"step": 587
},
{
"epoch": 0.519893899204244,
"grad_norm": 0.2904685139656067,
"learning_rate": 1.9285732259231315e-05,
"loss": 0.7052,
"step": 588
},
{
"epoch": 0.5207780725022104,
"grad_norm": 0.28331536054611206,
"learning_rate": 1.928312613383678e-05,
"loss": 0.7047,
"step": 589
},
{
"epoch": 0.5216622458001768,
"grad_norm": 0.29390013217926025,
"learning_rate": 1.92805154393934e-05,
"loss": 0.7293,
"step": 590
},
{
"epoch": 0.5225464190981433,
"grad_norm": 0.2958281636238098,
"learning_rate": 1.9277900177186137e-05,
"loss": 0.7048,
"step": 591
},
{
"epoch": 0.5234305923961097,
"grad_norm": 0.28478682041168213,
"learning_rate": 1.9275280348502186e-05,
"loss": 0.682,
"step": 592
},
{
"epoch": 0.5243147656940761,
"grad_norm": 0.30639228224754333,
"learning_rate": 1.9272655954631e-05,
"loss": 0.6937,
"step": 593
},
{
"epoch": 0.5251989389920424,
"grad_norm": 0.29246920347213745,
"learning_rate": 1.9270026996864278e-05,
"loss": 0.7221,
"step": 594
},
{
"epoch": 0.5260831122900088,
"grad_norm": 0.3105323612689972,
"learning_rate": 1.9267393476495957e-05,
"loss": 0.6994,
"step": 595
},
{
"epoch": 0.5269672855879752,
"grad_norm": 0.2895874083042145,
"learning_rate": 1.926475539482223e-05,
"loss": 0.6984,
"step": 596
},
{
"epoch": 0.5278514588859416,
"grad_norm": 0.2984839379787445,
"learning_rate": 1.926211275314153e-05,
"loss": 0.7161,
"step": 597
},
{
"epoch": 0.5287356321839081,
"grad_norm": 0.3109927773475647,
"learning_rate": 1.9259465552754537e-05,
"loss": 0.7276,
"step": 598
},
{
"epoch": 0.5296198054818745,
"grad_norm": 0.3024746775627136,
"learning_rate": 1.925681379496417e-05,
"loss": 0.705,
"step": 599
},
{
"epoch": 0.5305039787798409,
"grad_norm": 0.29454290866851807,
"learning_rate": 1.9254157481075593e-05,
"loss": 0.7109,
"step": 600
},
{
"epoch": 0.5313881520778072,
"grad_norm": 0.29110249876976013,
"learning_rate": 1.9251496612396218e-05,
"loss": 0.699,
"step": 601
},
{
"epoch": 0.5322723253757736,
"grad_norm": 0.2820034921169281,
"learning_rate": 1.9248831190235688e-05,
"loss": 0.6712,
"step": 602
},
{
"epoch": 0.53315649867374,
"grad_norm": 0.2961048483848572,
"learning_rate": 1.9246161215905895e-05,
"loss": 0.6989,
"step": 603
},
{
"epoch": 0.5340406719717065,
"grad_norm": 0.2944365441799164,
"learning_rate": 1.9243486690720977e-05,
"loss": 0.6976,
"step": 604
},
{
"epoch": 0.5349248452696729,
"grad_norm": 0.320938378572464,
"learning_rate": 1.9240807615997297e-05,
"loss": 0.6823,
"step": 605
},
{
"epoch": 0.5358090185676393,
"grad_norm": 0.2925585210323334,
"learning_rate": 1.923812399305347e-05,
"loss": 0.6837,
"step": 606
},
{
"epoch": 0.5366931918656057,
"grad_norm": 0.3049519658088684,
"learning_rate": 1.9235435823210337e-05,
"loss": 0.6984,
"step": 607
},
{
"epoch": 0.537577365163572,
"grad_norm": 0.27574601769447327,
"learning_rate": 1.9232743107790994e-05,
"loss": 0.6988,
"step": 608
},
{
"epoch": 0.5384615384615384,
"grad_norm": 0.30247947573661804,
"learning_rate": 1.9230045848120758e-05,
"loss": 0.7428,
"step": 609
},
{
"epoch": 0.5393457117595049,
"grad_norm": 0.2825312614440918,
"learning_rate": 1.9227344045527196e-05,
"loss": 0.7207,
"step": 610
},
{
"epoch": 0.5402298850574713,
"grad_norm": 0.29073336720466614,
"learning_rate": 1.9224637701340096e-05,
"loss": 0.7267,
"step": 611
},
{
"epoch": 0.5411140583554377,
"grad_norm": 0.2754007577896118,
"learning_rate": 1.9221926816891498e-05,
"loss": 0.7058,
"step": 612
},
{
"epoch": 0.5419982316534041,
"grad_norm": 0.28183868527412415,
"learning_rate": 1.921921139351566e-05,
"loss": 0.6965,
"step": 613
},
{
"epoch": 0.5428824049513705,
"grad_norm": 0.313535213470459,
"learning_rate": 1.921649143254909e-05,
"loss": 0.7357,
"step": 614
},
{
"epoch": 0.5437665782493368,
"grad_norm": 0.28740549087524414,
"learning_rate": 1.921376693533052e-05,
"loss": 0.6769,
"step": 615
},
{
"epoch": 0.5446507515473032,
"grad_norm": 0.32442110776901245,
"learning_rate": 1.9211037903200912e-05,
"loss": 0.6905,
"step": 616
},
{
"epoch": 0.5455349248452697,
"grad_norm": 0.28533607721328735,
"learning_rate": 1.920830433750347e-05,
"loss": 0.7277,
"step": 617
},
{
"epoch": 0.5464190981432361,
"grad_norm": 0.297689288854599,
"learning_rate": 1.920556623958362e-05,
"loss": 0.6766,
"step": 618
},
{
"epoch": 0.5473032714412025,
"grad_norm": 0.2790350317955017,
"learning_rate": 1.9202823610789026e-05,
"loss": 0.692,
"step": 619
},
{
"epoch": 0.5481874447391689,
"grad_norm": 0.3083040714263916,
"learning_rate": 1.9200076452469575e-05,
"loss": 0.7122,
"step": 620
},
{
"epoch": 0.5490716180371353,
"grad_norm": 0.28327423334121704,
"learning_rate": 1.919732476597739e-05,
"loss": 0.6667,
"step": 621
},
{
"epoch": 0.5499557913351016,
"grad_norm": 0.3095477223396301,
"learning_rate": 1.919456855266682e-05,
"loss": 0.7037,
"step": 622
},
{
"epoch": 0.5508399646330681,
"grad_norm": 0.29425105452537537,
"learning_rate": 1.9191807813894437e-05,
"loss": 0.6763,
"step": 623
},
{
"epoch": 0.5517241379310345,
"grad_norm": 0.2899615168571472,
"learning_rate": 1.918904255101905e-05,
"loss": 0.6881,
"step": 624
},
{
"epoch": 0.5526083112290009,
"grad_norm": 0.2945635914802551,
"learning_rate": 1.9186272765401688e-05,
"loss": 0.6731,
"step": 625
},
{
"epoch": 0.5534924845269673,
"grad_norm": 0.2761707901954651,
"learning_rate": 1.918349845840561e-05,
"loss": 0.6948,
"step": 626
},
{
"epoch": 0.5543766578249337,
"grad_norm": 0.29618117213249207,
"learning_rate": 1.9180719631396295e-05,
"loss": 0.7115,
"step": 627
},
{
"epoch": 0.5552608311229,
"grad_norm": 0.2709069848060608,
"learning_rate": 1.917793628574145e-05,
"loss": 0.6754,
"step": 628
},
{
"epoch": 0.5561450044208665,
"grad_norm": 0.301939457654953,
"learning_rate": 1.9175148422811007e-05,
"loss": 0.7116,
"step": 629
},
{
"epoch": 0.5570291777188329,
"grad_norm": 0.27732163667678833,
"learning_rate": 1.9172356043977123e-05,
"loss": 0.7138,
"step": 630
},
{
"epoch": 0.5579133510167993,
"grad_norm": 0.3195061981678009,
"learning_rate": 1.9169559150614176e-05,
"loss": 0.6862,
"step": 631
},
{
"epoch": 0.5587975243147657,
"grad_norm": 0.29815909266471863,
"learning_rate": 1.9166757744098756e-05,
"loss": 0.7419,
"step": 632
},
{
"epoch": 0.5596816976127321,
"grad_norm": 0.31621137261390686,
"learning_rate": 1.9163951825809694e-05,
"loss": 0.6888,
"step": 633
},
{
"epoch": 0.5605658709106985,
"grad_norm": 0.29336240887641907,
"learning_rate": 1.9161141397128023e-05,
"loss": 0.7272,
"step": 634
},
{
"epoch": 0.5614500442086648,
"grad_norm": 0.2909793257713318,
"learning_rate": 1.9158326459437007e-05,
"loss": 0.6858,
"step": 635
},
{
"epoch": 0.5623342175066313,
"grad_norm": 0.28055310249328613,
"learning_rate": 1.9155507014122125e-05,
"loss": 0.6752,
"step": 636
},
{
"epoch": 0.5632183908045977,
"grad_norm": 0.3142622411251068,
"learning_rate": 1.915268306257108e-05,
"loss": 0.6889,
"step": 637
},
{
"epoch": 0.5641025641025641,
"grad_norm": 0.2958996891975403,
"learning_rate": 1.9149854606173788e-05,
"loss": 0.6927,
"step": 638
},
{
"epoch": 0.5649867374005305,
"grad_norm": 0.29973822832107544,
"learning_rate": 1.9147021646322377e-05,
"loss": 0.6847,
"step": 639
},
{
"epoch": 0.5658709106984969,
"grad_norm": 0.3103879392147064,
"learning_rate": 1.9144184184411204e-05,
"loss": 0.7033,
"step": 640
},
{
"epoch": 0.5667550839964633,
"grad_norm": 0.29941630363464355,
"learning_rate": 1.914134222183683e-05,
"loss": 0.6792,
"step": 641
},
{
"epoch": 0.5676392572944297,
"grad_norm": 0.3001434803009033,
"learning_rate": 1.9138495759998045e-05,
"loss": 0.697,
"step": 642
},
{
"epoch": 0.5685234305923961,
"grad_norm": 0.2750704288482666,
"learning_rate": 1.9135644800295835e-05,
"loss": 0.692,
"step": 643
},
{
"epoch": 0.5694076038903625,
"grad_norm": 0.29316946864128113,
"learning_rate": 1.9132789344133414e-05,
"loss": 0.7119,
"step": 644
},
{
"epoch": 0.5702917771883289,
"grad_norm": 0.30747634172439575,
"learning_rate": 1.9129929392916206e-05,
"loss": 0.7131,
"step": 645
},
{
"epoch": 0.5711759504862953,
"grad_norm": 0.2933180332183838,
"learning_rate": 1.9127064948051845e-05,
"loss": 0.6902,
"step": 646
},
{
"epoch": 0.5720601237842617,
"grad_norm": 0.28232842683792114,
"learning_rate": 1.9124196010950182e-05,
"loss": 0.6766,
"step": 647
},
{
"epoch": 0.5729442970822282,
"grad_norm": 0.30518031120300293,
"learning_rate": 1.912132258302327e-05,
"loss": 0.6976,
"step": 648
},
{
"epoch": 0.5738284703801945,
"grad_norm": 0.2834108769893646,
"learning_rate": 1.9118444665685382e-05,
"loss": 0.683,
"step": 649
},
{
"epoch": 0.5747126436781609,
"grad_norm": 0.30174678564071655,
"learning_rate": 1.9115562260352988e-05,
"loss": 0.6633,
"step": 650
},
{
"epoch": 0.5755968169761273,
"grad_norm": 0.28981122374534607,
"learning_rate": 1.9112675368444782e-05,
"loss": 0.6933,
"step": 651
},
{
"epoch": 0.5764809902740937,
"grad_norm": 0.29172584414482117,
"learning_rate": 1.9109783991381664e-05,
"loss": 0.6873,
"step": 652
},
{
"epoch": 0.5773651635720601,
"grad_norm": 0.28979557752609253,
"learning_rate": 1.9106888130586726e-05,
"loss": 0.6807,
"step": 653
},
{
"epoch": 0.5782493368700266,
"grad_norm": 0.2666468620300293,
"learning_rate": 1.9103987787485284e-05,
"loss": 0.6796,
"step": 654
},
{
"epoch": 0.579133510167993,
"grad_norm": 0.28257355093955994,
"learning_rate": 1.910108296350485e-05,
"loss": 0.692,
"step": 655
},
{
"epoch": 0.5800176834659593,
"grad_norm": 0.2792419195175171,
"learning_rate": 1.9098173660075146e-05,
"loss": 0.7034,
"step": 656
},
{
"epoch": 0.5809018567639257,
"grad_norm": 0.2725202143192291,
"learning_rate": 1.9095259878628104e-05,
"loss": 0.6858,
"step": 657
},
{
"epoch": 0.5817860300618921,
"grad_norm": 0.2694140374660492,
"learning_rate": 1.9092341620597844e-05,
"loss": 0.6867,
"step": 658
},
{
"epoch": 0.5826702033598585,
"grad_norm": 0.2723879814147949,
"learning_rate": 1.908941888742071e-05,
"loss": 0.7009,
"step": 659
},
{
"epoch": 0.583554376657825,
"grad_norm": 0.290936678647995,
"learning_rate": 1.908649168053523e-05,
"loss": 0.7218,
"step": 660
},
{
"epoch": 0.5844385499557914,
"grad_norm": 0.26363179087638855,
"learning_rate": 1.9083560001382146e-05,
"loss": 0.7063,
"step": 661
},
{
"epoch": 0.5853227232537578,
"grad_norm": 0.27006879448890686,
"learning_rate": 1.9080623851404394e-05,
"loss": 0.681,
"step": 662
},
{
"epoch": 0.5862068965517241,
"grad_norm": 0.26935040950775146,
"learning_rate": 1.9077683232047118e-05,
"loss": 0.678,
"step": 663
},
{
"epoch": 0.5870910698496905,
"grad_norm": 0.2758124768733978,
"learning_rate": 1.9074738144757652e-05,
"loss": 0.7036,
"step": 664
},
{
"epoch": 0.5879752431476569,
"grad_norm": 0.2624821364879608,
"learning_rate": 1.9071788590985536e-05,
"loss": 0.683,
"step": 665
},
{
"epoch": 0.5888594164456233,
"grad_norm": 0.27200961112976074,
"learning_rate": 1.906883457218251e-05,
"loss": 0.7113,
"step": 666
},
{
"epoch": 0.5897435897435898,
"grad_norm": 0.33828431367874146,
"learning_rate": 1.9065876089802505e-05,
"loss": 0.6973,
"step": 667
},
{
"epoch": 0.5906277630415562,
"grad_norm": 0.2844296395778656,
"learning_rate": 1.906291314530165e-05,
"loss": 0.7057,
"step": 668
},
{
"epoch": 0.5915119363395226,
"grad_norm": 0.2821897864341736,
"learning_rate": 1.905994574013828e-05,
"loss": 0.6807,
"step": 669
},
{
"epoch": 0.5923961096374889,
"grad_norm": 0.2591938376426697,
"learning_rate": 1.9056973875772914e-05,
"loss": 0.6683,
"step": 670
},
{
"epoch": 0.5932802829354553,
"grad_norm": 0.2734562158584595,
"learning_rate": 1.9053997553668266e-05,
"loss": 0.7318,
"step": 671
},
{
"epoch": 0.5941644562334217,
"grad_norm": 0.27932876348495483,
"learning_rate": 1.905101677528925e-05,
"loss": 0.6964,
"step": 672
},
{
"epoch": 0.5950486295313882,
"grad_norm": 0.2744494676589966,
"learning_rate": 1.904803154210298e-05,
"loss": 0.6765,
"step": 673
},
{
"epoch": 0.5959328028293546,
"grad_norm": 0.27666282653808594,
"learning_rate": 1.9045041855578737e-05,
"loss": 0.691,
"step": 674
},
{
"epoch": 0.596816976127321,
"grad_norm": 0.2819247543811798,
"learning_rate": 1.9042047717188028e-05,
"loss": 0.7024,
"step": 675
},
{
"epoch": 0.5977011494252874,
"grad_norm": 0.2798329293727875,
"learning_rate": 1.9039049128404522e-05,
"loss": 0.6981,
"step": 676
},
{
"epoch": 0.5985853227232537,
"grad_norm": 0.3367370665073395,
"learning_rate": 1.9036046090704094e-05,
"loss": 0.6923,
"step": 677
},
{
"epoch": 0.5994694960212201,
"grad_norm": 0.30731940269470215,
"learning_rate": 1.9033038605564806e-05,
"loss": 0.701,
"step": 678
},
{
"epoch": 0.6003536693191865,
"grad_norm": 0.2731819450855255,
"learning_rate": 1.9030026674466905e-05,
"loss": 0.7025,
"step": 679
},
{
"epoch": 0.601237842617153,
"grad_norm": 0.3132810592651367,
"learning_rate": 1.902701029889283e-05,
"loss": 0.6996,
"step": 680
},
{
"epoch": 0.6021220159151194,
"grad_norm": 0.3042294383049011,
"learning_rate": 1.9023989480327207e-05,
"loss": 0.7064,
"step": 681
},
{
"epoch": 0.6030061892130858,
"grad_norm": 0.2661696672439575,
"learning_rate": 1.902096422025685e-05,
"loss": 0.6892,
"step": 682
},
{
"epoch": 0.6038903625110522,
"grad_norm": 0.2820367217063904,
"learning_rate": 1.901793452017076e-05,
"loss": 0.6842,
"step": 683
},
{
"epoch": 0.6047745358090185,
"grad_norm": 0.2975214719772339,
"learning_rate": 1.901490038156011e-05,
"loss": 0.721,
"step": 684
},
{
"epoch": 0.6056587091069849,
"grad_norm": 0.30187922716140747,
"learning_rate": 1.901186180591828e-05,
"loss": 0.6938,
"step": 685
},
{
"epoch": 0.6065428824049514,
"grad_norm": 0.2695740759372711,
"learning_rate": 1.900881879474082e-05,
"loss": 0.686,
"step": 686
},
{
"epoch": 0.6074270557029178,
"grad_norm": 0.30245211720466614,
"learning_rate": 1.900577134952546e-05,
"loss": 0.7135,
"step": 687
},
{
"epoch": 0.6083112290008842,
"grad_norm": 0.2724456787109375,
"learning_rate": 1.900271947177212e-05,
"loss": 0.7019,
"step": 688
},
{
"epoch": 0.6091954022988506,
"grad_norm": 0.28307440876960754,
"learning_rate": 1.89996631629829e-05,
"loss": 0.6654,
"step": 689
},
{
"epoch": 0.610079575596817,
"grad_norm": 0.28724274039268494,
"learning_rate": 1.8996602424662085e-05,
"loss": 0.7105,
"step": 690
},
{
"epoch": 0.6109637488947833,
"grad_norm": 0.2776222825050354,
"learning_rate": 1.899353725831613e-05,
"loss": 0.6879,
"step": 691
},
{
"epoch": 0.6118479221927497,
"grad_norm": 0.26721012592315674,
"learning_rate": 1.8990467665453675e-05,
"loss": 0.7068,
"step": 692
},
{
"epoch": 0.6127320954907162,
"grad_norm": 0.30207639932632446,
"learning_rate": 1.8987393647585543e-05,
"loss": 0.6854,
"step": 693
},
{
"epoch": 0.6136162687886826,
"grad_norm": 0.2716652750968933,
"learning_rate": 1.8984315206224725e-05,
"loss": 0.7028,
"step": 694
},
{
"epoch": 0.614500442086649,
"grad_norm": 0.2875162959098816,
"learning_rate": 1.89812323428864e-05,
"loss": 0.7,
"step": 695
},
{
"epoch": 0.6153846153846154,
"grad_norm": 0.28038549423217773,
"learning_rate": 1.897814505908791e-05,
"loss": 0.6884,
"step": 696
},
{
"epoch": 0.6162687886825818,
"grad_norm": 0.28570565581321716,
"learning_rate": 1.897505335634879e-05,
"loss": 0.6659,
"step": 697
},
{
"epoch": 0.6171529619805481,
"grad_norm": 0.28716301918029785,
"learning_rate": 1.8971957236190736e-05,
"loss": 0.6733,
"step": 698
},
{
"epoch": 0.6180371352785146,
"grad_norm": 0.2640667259693146,
"learning_rate": 1.8968856700137624e-05,
"loss": 0.6831,
"step": 699
},
{
"epoch": 0.618921308576481,
"grad_norm": 0.3008674383163452,
"learning_rate": 1.8965751749715502e-05,
"loss": 0.6922,
"step": 700
},
{
"epoch": 0.6198054818744474,
"grad_norm": 0.26678988337516785,
"learning_rate": 1.896264238645259e-05,
"loss": 0.6858,
"step": 701
},
{
"epoch": 0.6206896551724138,
"grad_norm": 0.28425052762031555,
"learning_rate": 1.895952861187929e-05,
"loss": 0.6565,
"step": 702
},
{
"epoch": 0.6215738284703802,
"grad_norm": 0.2859535813331604,
"learning_rate": 1.8956410427528155e-05,
"loss": 0.7085,
"step": 703
},
{
"epoch": 0.6224580017683466,
"grad_norm": 0.2785528898239136,
"learning_rate": 1.8953287834933923e-05,
"loss": 0.6919,
"step": 704
},
{
"epoch": 0.623342175066313,
"grad_norm": 0.26437628269195557,
"learning_rate": 1.89501608356335e-05,
"loss": 0.6829,
"step": 705
},
{
"epoch": 0.6242263483642794,
"grad_norm": 0.27315545082092285,
"learning_rate": 1.8947029431165963e-05,
"loss": 0.6714,
"step": 706
},
{
"epoch": 0.6251105216622458,
"grad_norm": 0.27934756875038147,
"learning_rate": 1.8943893623072544e-05,
"loss": 0.6716,
"step": 707
},
{
"epoch": 0.6259946949602122,
"grad_norm": 0.28211453557014465,
"learning_rate": 1.8940753412896664e-05,
"loss": 0.7102,
"step": 708
},
{
"epoch": 0.6268788682581786,
"grad_norm": 0.29011833667755127,
"learning_rate": 1.893760880218389e-05,
"loss": 0.6923,
"step": 709
},
{
"epoch": 0.627763041556145,
"grad_norm": 0.2963981628417969,
"learning_rate": 1.8934459792481963e-05,
"loss": 0.7,
"step": 710
},
{
"epoch": 0.6286472148541115,
"grad_norm": 0.26882535219192505,
"learning_rate": 1.8931306385340793e-05,
"loss": 0.6939,
"step": 711
},
{
"epoch": 0.6295313881520778,
"grad_norm": 0.279835969209671,
"learning_rate": 1.892814858231245e-05,
"loss": 0.6766,
"step": 712
},
{
"epoch": 0.6304155614500442,
"grad_norm": 0.27672797441482544,
"learning_rate": 1.8924986384951173e-05,
"loss": 0.6859,
"step": 713
},
{
"epoch": 0.6312997347480106,
"grad_norm": 0.27796831727027893,
"learning_rate": 1.8921819794813353e-05,
"loss": 0.6959,
"step": 714
},
{
"epoch": 0.632183908045977,
"grad_norm": 0.28798171877861023,
"learning_rate": 1.891864881345755e-05,
"loss": 0.703,
"step": 715
},
{
"epoch": 0.6330680813439434,
"grad_norm": 0.28192129731178284,
"learning_rate": 1.891547344244449e-05,
"loss": 0.6922,
"step": 716
},
{
"epoch": 0.6339522546419099,
"grad_norm": 0.3002018928527832,
"learning_rate": 1.891229368333705e-05,
"loss": 0.6739,
"step": 717
},
{
"epoch": 0.6348364279398763,
"grad_norm": 0.3045368790626526,
"learning_rate": 1.8909109537700275e-05,
"loss": 0.6876,
"step": 718
},
{
"epoch": 0.6357206012378426,
"grad_norm": 0.28418785333633423,
"learning_rate": 1.890592100710136e-05,
"loss": 0.6894,
"step": 719
},
{
"epoch": 0.636604774535809,
"grad_norm": 0.2703597843647003,
"learning_rate": 1.8902728093109668e-05,
"loss": 0.7017,
"step": 720
},
{
"epoch": 0.6374889478337754,
"grad_norm": 0.29267582297325134,
"learning_rate": 1.8899530797296714e-05,
"loss": 0.6751,
"step": 721
},
{
"epoch": 0.6383731211317418,
"grad_norm": 0.25415948033332825,
"learning_rate": 1.8896329121236165e-05,
"loss": 0.6899,
"step": 722
},
{
"epoch": 0.6392572944297082,
"grad_norm": 0.3276171386241913,
"learning_rate": 1.8893123066503857e-05,
"loss": 0.695,
"step": 723
},
{
"epoch": 0.6401414677276747,
"grad_norm": 0.27536094188690186,
"learning_rate": 1.8889912634677772e-05,
"loss": 0.7217,
"step": 724
},
{
"epoch": 0.6410256410256411,
"grad_norm": 0.3073112666606903,
"learning_rate": 1.8886697827338044e-05,
"loss": 0.6784,
"step": 725
},
{
"epoch": 0.6419098143236074,
"grad_norm": 0.29655781388282776,
"learning_rate": 1.888347864606697e-05,
"loss": 0.7064,
"step": 726
},
{
"epoch": 0.6427939876215738,
"grad_norm": 0.2843087613582611,
"learning_rate": 1.888025509244899e-05,
"loss": 0.6845,
"step": 727
},
{
"epoch": 0.6436781609195402,
"grad_norm": 0.296983540058136,
"learning_rate": 1.88770271680707e-05,
"loss": 0.7187,
"step": 728
},
{
"epoch": 0.6445623342175066,
"grad_norm": 0.27124589681625366,
"learning_rate": 1.8873794874520853e-05,
"loss": 0.6786,
"step": 729
},
{
"epoch": 0.6454465075154731,
"grad_norm": 0.2964281439781189,
"learning_rate": 1.887055821339034e-05,
"loss": 0.6986,
"step": 730
},
{
"epoch": 0.6463306808134395,
"grad_norm": 0.2887691855430603,
"learning_rate": 1.8867317186272213e-05,
"loss": 0.6989,
"step": 731
},
{
"epoch": 0.6472148541114059,
"grad_norm": 0.2747102677822113,
"learning_rate": 1.886407179476167e-05,
"loss": 0.6988,
"step": 732
},
{
"epoch": 0.6480990274093722,
"grad_norm": 0.27963218092918396,
"learning_rate": 1.886082204045605e-05,
"loss": 0.7021,
"step": 733
},
{
"epoch": 0.6489832007073386,
"grad_norm": 0.28080326318740845,
"learning_rate": 1.8857567924954852e-05,
"loss": 0.6962,
"step": 734
},
{
"epoch": 0.649867374005305,
"grad_norm": 0.2944171130657196,
"learning_rate": 1.8854309449859708e-05,
"loss": 0.6836,
"step": 735
},
{
"epoch": 0.6507515473032714,
"grad_norm": 0.2782329320907593,
"learning_rate": 1.8851046616774405e-05,
"loss": 0.6824,
"step": 736
},
{
"epoch": 0.6516357206012379,
"grad_norm": 0.2737736999988556,
"learning_rate": 1.8847779427304874e-05,
"loss": 0.656,
"step": 737
},
{
"epoch": 0.6525198938992043,
"grad_norm": 0.2638976573944092,
"learning_rate": 1.8844507883059188e-05,
"loss": 0.6684,
"step": 738
},
{
"epoch": 0.6534040671971706,
"grad_norm": 0.3071131706237793,
"learning_rate": 1.884123198564756e-05,
"loss": 0.6925,
"step": 739
},
{
"epoch": 0.654288240495137,
"grad_norm": 0.28078922629356384,
"learning_rate": 1.8837951736682353e-05,
"loss": 0.6977,
"step": 740
},
{
"epoch": 0.6551724137931034,
"grad_norm": 0.2772842049598694,
"learning_rate": 1.883466713777807e-05,
"loss": 0.6856,
"step": 741
},
{
"epoch": 0.6560565870910698,
"grad_norm": 0.28466105461120605,
"learning_rate": 1.8831378190551344e-05,
"loss": 0.7056,
"step": 742
},
{
"epoch": 0.6569407603890363,
"grad_norm": 0.2716815173625946,
"learning_rate": 1.882808489662097e-05,
"loss": 0.6848,
"step": 743
},
{
"epoch": 0.6578249336870027,
"grad_norm": 0.29176732897758484,
"learning_rate": 1.882478725760786e-05,
"loss": 0.6797,
"step": 744
},
{
"epoch": 0.6587091069849691,
"grad_norm": 0.29607975482940674,
"learning_rate": 1.882148527513508e-05,
"loss": 0.703,
"step": 745
},
{
"epoch": 0.6595932802829354,
"grad_norm": 0.27619922161102295,
"learning_rate": 1.8818178950827825e-05,
"loss": 0.7049,
"step": 746
},
{
"epoch": 0.6604774535809018,
"grad_norm": 0.28621983528137207,
"learning_rate": 1.881486828631343e-05,
"loss": 0.6947,
"step": 747
},
{
"epoch": 0.6613616268788682,
"grad_norm": 0.294779896736145,
"learning_rate": 1.8811553283221366e-05,
"loss": 0.6911,
"step": 748
},
{
"epoch": 0.6622458001768347,
"grad_norm": 0.28222110867500305,
"learning_rate": 1.8808233943183243e-05,
"loss": 0.6776,
"step": 749
},
{
"epoch": 0.6631299734748011,
"grad_norm": 0.2852027416229248,
"learning_rate": 1.8804910267832798e-05,
"loss": 0.68,
"step": 750
},
{
"epoch": 0.6640141467727675,
"grad_norm": 0.2977186441421509,
"learning_rate": 1.880158225880591e-05,
"loss": 0.6986,
"step": 751
},
{
"epoch": 0.6648983200707339,
"grad_norm": 0.283395379781723,
"learning_rate": 1.8798249917740588e-05,
"loss": 0.7192,
"step": 752
},
{
"epoch": 0.6657824933687002,
"grad_norm": 0.26229098439216614,
"learning_rate": 1.8794913246276968e-05,
"loss": 0.6838,
"step": 753
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.30019545555114746,
"learning_rate": 1.879157224605732e-05,
"loss": 0.7036,
"step": 754
},
{
"epoch": 0.667550839964633,
"grad_norm": 0.26639389991760254,
"learning_rate": 1.8788226918726055e-05,
"loss": 0.7087,
"step": 755
},
{
"epoch": 0.6684350132625995,
"grad_norm": 0.27501779794692993,
"learning_rate": 1.8784877265929694e-05,
"loss": 0.6821,
"step": 756
},
{
"epoch": 0.6693191865605659,
"grad_norm": 0.2888008952140808,
"learning_rate": 1.878152328931691e-05,
"loss": 0.6758,
"step": 757
},
{
"epoch": 0.6702033598585323,
"grad_norm": 0.2883148193359375,
"learning_rate": 1.877816499053848e-05,
"loss": 0.6929,
"step": 758
},
{
"epoch": 0.6710875331564987,
"grad_norm": 0.29950571060180664,
"learning_rate": 1.8774802371247328e-05,
"loss": 0.69,
"step": 759
},
{
"epoch": 0.671971706454465,
"grad_norm": 0.2711915075778961,
"learning_rate": 1.8771435433098493e-05,
"loss": 0.6855,
"step": 760
},
{
"epoch": 0.6728558797524314,
"grad_norm": 0.3058179020881653,
"learning_rate": 1.876806417774915e-05,
"loss": 0.7255,
"step": 761
},
{
"epoch": 0.6737400530503979,
"grad_norm": 0.27495062351226807,
"learning_rate": 1.8764688606858585e-05,
"loss": 0.686,
"step": 762
},
{
"epoch": 0.6746242263483643,
"grad_norm": 0.3809083104133606,
"learning_rate": 1.876130872208822e-05,
"loss": 0.6713,
"step": 763
},
{
"epoch": 0.6755083996463307,
"grad_norm": 0.29798707365989685,
"learning_rate": 1.875792452510159e-05,
"loss": 0.6891,
"step": 764
},
{
"epoch": 0.6763925729442971,
"grad_norm": 0.32260382175445557,
"learning_rate": 1.8754536017564366e-05,
"loss": 0.6849,
"step": 765
},
{
"epoch": 0.6772767462422635,
"grad_norm": 0.2910771369934082,
"learning_rate": 1.875114320114433e-05,
"loss": 0.6659,
"step": 766
},
{
"epoch": 0.6781609195402298,
"grad_norm": 0.27919453382492065,
"learning_rate": 1.8747746077511385e-05,
"loss": 0.6815,
"step": 767
},
{
"epoch": 0.6790450928381963,
"grad_norm": 0.34924349188804626,
"learning_rate": 1.8744344648337554e-05,
"loss": 0.6814,
"step": 768
},
{
"epoch": 0.6799292661361627,
"grad_norm": 0.2619962692260742,
"learning_rate": 1.8740938915296992e-05,
"loss": 0.6651,
"step": 769
},
{
"epoch": 0.6808134394341291,
"grad_norm": 0.33637112379074097,
"learning_rate": 1.8737528880065953e-05,
"loss": 0.7245,
"step": 770
},
{
"epoch": 0.6816976127320955,
"grad_norm": 0.2716955840587616,
"learning_rate": 1.8734114544322822e-05,
"loss": 0.6772,
"step": 771
},
{
"epoch": 0.6825817860300619,
"grad_norm": 0.3047332465648651,
"learning_rate": 1.873069590974809e-05,
"loss": 0.7052,
"step": 772
},
{
"epoch": 0.6834659593280283,
"grad_norm": 0.27930551767349243,
"learning_rate": 1.8727272978024376e-05,
"loss": 0.6664,
"step": 773
},
{
"epoch": 0.6843501326259946,
"grad_norm": 0.2784731090068817,
"learning_rate": 1.872384575083641e-05,
"loss": 0.688,
"step": 774
},
{
"epoch": 0.6852343059239611,
"grad_norm": 0.2964549660682678,
"learning_rate": 1.8720414229871026e-05,
"loss": 0.6783,
"step": 775
},
{
"epoch": 0.6861184792219275,
"grad_norm": 0.28060436248779297,
"learning_rate": 1.871697841681719e-05,
"loss": 0.7038,
"step": 776
},
{
"epoch": 0.6870026525198939,
"grad_norm": 0.28276878595352173,
"learning_rate": 1.8713538313365958e-05,
"loss": 0.6887,
"step": 777
},
{
"epoch": 0.6878868258178603,
"grad_norm": 0.2759625315666199,
"learning_rate": 1.8710093921210516e-05,
"loss": 0.679,
"step": 778
},
{
"epoch": 0.6887709991158267,
"grad_norm": 0.30537715554237366,
"learning_rate": 1.8706645242046158e-05,
"loss": 0.6915,
"step": 779
},
{
"epoch": 0.6896551724137931,
"grad_norm": 0.34021756052970886,
"learning_rate": 1.8703192277570277e-05,
"loss": 0.6851,
"step": 780
},
{
"epoch": 0.6905393457117595,
"grad_norm": 0.2879871428012848,
"learning_rate": 1.8699735029482387e-05,
"loss": 0.6941,
"step": 781
},
{
"epoch": 0.6914235190097259,
"grad_norm": 0.2722664475440979,
"learning_rate": 1.8696273499484106e-05,
"loss": 0.6834,
"step": 782
},
{
"epoch": 0.6923076923076923,
"grad_norm": 0.2887817621231079,
"learning_rate": 1.869280768927916e-05,
"loss": 0.6867,
"step": 783
},
{
"epoch": 0.6931918656056587,
"grad_norm": 0.27706626057624817,
"learning_rate": 1.8689337600573382e-05,
"loss": 0.6807,
"step": 784
},
{
"epoch": 0.6940760389036251,
"grad_norm": 0.26724323630332947,
"learning_rate": 1.8685863235074705e-05,
"loss": 0.7017,
"step": 785
},
{
"epoch": 0.6949602122015915,
"grad_norm": 0.28479787707328796,
"learning_rate": 1.8682384594493176e-05,
"loss": 0.6897,
"step": 786
},
{
"epoch": 0.695844385499558,
"grad_norm": 0.2582482397556305,
"learning_rate": 1.8678901680540945e-05,
"loss": 0.6714,
"step": 787
},
{
"epoch": 0.6967285587975243,
"grad_norm": 0.28330233693122864,
"learning_rate": 1.8675414494932257e-05,
"loss": 0.6797,
"step": 788
},
{
"epoch": 0.6976127320954907,
"grad_norm": 0.26773005723953247,
"learning_rate": 1.8671923039383467e-05,
"loss": 0.661,
"step": 789
},
{
"epoch": 0.6984969053934571,
"grad_norm": 0.2895766496658325,
"learning_rate": 1.866842731561303e-05,
"loss": 0.711,
"step": 790
},
{
"epoch": 0.6993810786914235,
"grad_norm": 0.2639464735984802,
"learning_rate": 1.8664927325341505e-05,
"loss": 0.6983,
"step": 791
},
{
"epoch": 0.7002652519893899,
"grad_norm": 0.2962898015975952,
"learning_rate": 1.8661423070291538e-05,
"loss": 0.6525,
"step": 792
},
{
"epoch": 0.7011494252873564,
"grad_norm": 0.26293328404426575,
"learning_rate": 1.865791455218789e-05,
"loss": 0.6835,
"step": 793
},
{
"epoch": 0.7020335985853228,
"grad_norm": 0.28773537278175354,
"learning_rate": 1.865440177275742e-05,
"loss": 0.6862,
"step": 794
},
{
"epoch": 0.7029177718832891,
"grad_norm": 0.2749754786491394,
"learning_rate": 1.865088473372906e-05,
"loss": 0.6713,
"step": 795
},
{
"epoch": 0.7038019451812555,
"grad_norm": 0.2794829308986664,
"learning_rate": 1.8647363436833874e-05,
"loss": 0.6865,
"step": 796
},
{
"epoch": 0.7046861184792219,
"grad_norm": 0.28543275594711304,
"learning_rate": 1.8643837883805e-05,
"loss": 0.6924,
"step": 797
},
{
"epoch": 0.7055702917771883,
"grad_norm": 0.2908453047275543,
"learning_rate": 1.8640308076377666e-05,
"loss": 0.6974,
"step": 798
},
{
"epoch": 0.7064544650751547,
"grad_norm": 0.27419716119766235,
"learning_rate": 1.863677401628921e-05,
"loss": 0.6867,
"step": 799
},
{
"epoch": 0.7073386383731212,
"grad_norm": 0.2997085154056549,
"learning_rate": 1.863323570527906e-05,
"loss": 0.6972,
"step": 800
},
{
"epoch": 0.7082228116710876,
"grad_norm": 0.2897700369358063,
"learning_rate": 1.862969314508872e-05,
"loss": 0.6624,
"step": 801
},
{
"epoch": 0.7091069849690539,
"grad_norm": 0.27029433846473694,
"learning_rate": 1.8626146337461813e-05,
"loss": 0.6795,
"step": 802
},
{
"epoch": 0.7099911582670203,
"grad_norm": 0.2865094840526581,
"learning_rate": 1.8622595284144026e-05,
"loss": 0.6792,
"step": 803
},
{
"epoch": 0.7108753315649867,
"grad_norm": 0.27018409967422485,
"learning_rate": 1.861903998688315e-05,
"loss": 0.6737,
"step": 804
},
{
"epoch": 0.7117595048629531,
"grad_norm": 0.29579800367355347,
"learning_rate": 1.8615480447429064e-05,
"loss": 0.6836,
"step": 805
},
{
"epoch": 0.7126436781609196,
"grad_norm": 0.27648472785949707,
"learning_rate": 1.8611916667533734e-05,
"loss": 0.6963,
"step": 806
},
{
"epoch": 0.713527851458886,
"grad_norm": 0.28229227662086487,
"learning_rate": 1.8608348648951207e-05,
"loss": 0.6673,
"step": 807
},
{
"epoch": 0.7144120247568524,
"grad_norm": 0.27765771746635437,
"learning_rate": 1.860477639343762e-05,
"loss": 0.6995,
"step": 808
},
{
"epoch": 0.7152961980548187,
"grad_norm": 0.2719252109527588,
"learning_rate": 1.8601199902751203e-05,
"loss": 0.6659,
"step": 809
},
{
"epoch": 0.7161803713527851,
"grad_norm": 0.3025103807449341,
"learning_rate": 1.859761917865226e-05,
"loss": 0.6808,
"step": 810
},
{
"epoch": 0.7170645446507515,
"grad_norm": 0.2854987382888794,
"learning_rate": 1.8594034222903183e-05,
"loss": 0.6772,
"step": 811
},
{
"epoch": 0.717948717948718,
"grad_norm": 0.27578482031822205,
"learning_rate": 1.8590445037268444e-05,
"loss": 0.685,
"step": 812
},
{
"epoch": 0.7188328912466844,
"grad_norm": 0.27414318919181824,
"learning_rate": 1.8586851623514602e-05,
"loss": 0.6743,
"step": 813
},
{
"epoch": 0.7197170645446508,
"grad_norm": 0.2895112931728363,
"learning_rate": 1.85832539834103e-05,
"loss": 0.684,
"step": 814
},
{
"epoch": 0.7206012378426172,
"grad_norm": 0.26398617029190063,
"learning_rate": 1.8579652118726243e-05,
"loss": 0.6782,
"step": 815
},
{
"epoch": 0.7214854111405835,
"grad_norm": 0.26850610971450806,
"learning_rate": 1.8576046031235237e-05,
"loss": 0.6823,
"step": 816
},
{
"epoch": 0.7223695844385499,
"grad_norm": 0.2874492108821869,
"learning_rate": 1.8572435722712154e-05,
"loss": 0.6889,
"step": 817
},
{
"epoch": 0.7232537577365163,
"grad_norm": 0.28707626461982727,
"learning_rate": 1.8568821194933945e-05,
"loss": 0.7093,
"step": 818
},
{
"epoch": 0.7241379310344828,
"grad_norm": 0.28168824315071106,
"learning_rate": 1.8565202449679643e-05,
"loss": 0.6506,
"step": 819
},
{
"epoch": 0.7250221043324492,
"grad_norm": 0.29286202788352966,
"learning_rate": 1.856157948873035e-05,
"loss": 0.6996,
"step": 820
},
{
"epoch": 0.7259062776304156,
"grad_norm": 0.2765836715698242,
"learning_rate": 1.855795231386925e-05,
"loss": 0.6866,
"step": 821
},
{
"epoch": 0.726790450928382,
"grad_norm": 0.26691484451293945,
"learning_rate": 1.8554320926881592e-05,
"loss": 0.7014,
"step": 822
},
{
"epoch": 0.7276746242263483,
"grad_norm": 0.26627305150032043,
"learning_rate": 1.8550685329554706e-05,
"loss": 0.6635,
"step": 823
},
{
"epoch": 0.7285587975243147,
"grad_norm": 0.2527620494365692,
"learning_rate": 1.8547045523677995e-05,
"loss": 0.6434,
"step": 824
},
{
"epoch": 0.7294429708222812,
"grad_norm": 0.272854208946228,
"learning_rate": 1.8543401511042925e-05,
"loss": 0.687,
"step": 825
},
{
"epoch": 0.7303271441202476,
"grad_norm": 0.2839547395706177,
"learning_rate": 1.8539753293443038e-05,
"loss": 0.6783,
"step": 826
},
{
"epoch": 0.731211317418214,
"grad_norm": 0.30083492398262024,
"learning_rate": 1.8536100872673946e-05,
"loss": 0.6825,
"step": 827
},
{
"epoch": 0.7320954907161804,
"grad_norm": 0.2861270308494568,
"learning_rate": 1.8532444250533328e-05,
"loss": 0.6774,
"step": 828
},
{
"epoch": 0.7329796640141468,
"grad_norm": 0.3001328706741333,
"learning_rate": 1.8528783428820934e-05,
"loss": 0.6569,
"step": 829
},
{
"epoch": 0.7338638373121131,
"grad_norm": 0.2594626545906067,
"learning_rate": 1.8525118409338578e-05,
"loss": 0.6682,
"step": 830
},
{
"epoch": 0.7347480106100795,
"grad_norm": 0.28760477900505066,
"learning_rate": 1.852144919389014e-05,
"loss": 0.6826,
"step": 831
},
{
"epoch": 0.735632183908046,
"grad_norm": 0.2748079299926758,
"learning_rate": 1.8517775784281564e-05,
"loss": 0.6712,
"step": 832
},
{
"epoch": 0.7365163572060124,
"grad_norm": 0.2913218140602112,
"learning_rate": 1.8514098182320864e-05,
"loss": 0.6911,
"step": 833
},
{
"epoch": 0.7374005305039788,
"grad_norm": 0.2720274329185486,
"learning_rate": 1.851041638981811e-05,
"loss": 0.6535,
"step": 834
},
{
"epoch": 0.7382847038019452,
"grad_norm": 0.27112436294555664,
"learning_rate": 1.8506730408585443e-05,
"loss": 0.6698,
"step": 835
},
{
"epoch": 0.7391688770999116,
"grad_norm": 0.28343087434768677,
"learning_rate": 1.8503040240437056e-05,
"loss": 0.6928,
"step": 836
},
{
"epoch": 0.7400530503978779,
"grad_norm": 0.2626572847366333,
"learning_rate": 1.8499345887189215e-05,
"loss": 0.6466,
"step": 837
},
{
"epoch": 0.7409372236958444,
"grad_norm": 0.2974900007247925,
"learning_rate": 1.8495647350660227e-05,
"loss": 0.6712,
"step": 838
},
{
"epoch": 0.7418213969938108,
"grad_norm": 0.28902268409729004,
"learning_rate": 1.8491944632670483e-05,
"loss": 0.6704,
"step": 839
},
{
"epoch": 0.7427055702917772,
"grad_norm": 0.2591244876384735,
"learning_rate": 1.8488237735042406e-05,
"loss": 0.6723,
"step": 840
},
{
"epoch": 0.7435897435897436,
"grad_norm": 0.26890426874160767,
"learning_rate": 1.84845266596005e-05,
"loss": 0.6751,
"step": 841
},
{
"epoch": 0.74447391688771,
"grad_norm": 0.2706976532936096,
"learning_rate": 1.8480811408171304e-05,
"loss": 0.7003,
"step": 842
},
{
"epoch": 0.7453580901856764,
"grad_norm": 0.28512752056121826,
"learning_rate": 1.8477091982583428e-05,
"loss": 0.6712,
"step": 843
},
{
"epoch": 0.7462422634836428,
"grad_norm": 0.25641173124313354,
"learning_rate": 1.8473368384667532e-05,
"loss": 0.6926,
"step": 844
},
{
"epoch": 0.7471264367816092,
"grad_norm": 0.29127317667007446,
"learning_rate": 1.8469640616256323e-05,
"loss": 0.663,
"step": 845
},
{
"epoch": 0.7480106100795756,
"grad_norm": 0.2720525562763214,
"learning_rate": 1.846590867918457e-05,
"loss": 0.7001,
"step": 846
},
{
"epoch": 0.748894783377542,
"grad_norm": 0.2741756737232208,
"learning_rate": 1.846217257528909e-05,
"loss": 0.6577,
"step": 847
},
{
"epoch": 0.7497789566755084,
"grad_norm": 0.26795852184295654,
"learning_rate": 1.845843230640875e-05,
"loss": 0.6698,
"step": 848
},
{
"epoch": 0.7506631299734748,
"grad_norm": 0.2630748450756073,
"learning_rate": 1.845468787438447e-05,
"loss": 0.6887,
"step": 849
},
{
"epoch": 0.7515473032714411,
"grad_norm": 0.2683887183666229,
"learning_rate": 1.845093928105921e-05,
"loss": 0.6961,
"step": 850
},
{
"epoch": 0.7524314765694076,
"grad_norm": 0.2629952132701874,
"learning_rate": 1.8447186528277993e-05,
"loss": 0.6768,
"step": 851
},
{
"epoch": 0.753315649867374,
"grad_norm": 0.26082801818847656,
"learning_rate": 1.8443429617887875e-05,
"loss": 0.6759,
"step": 852
},
{
"epoch": 0.7541998231653404,
"grad_norm": 0.26892992854118347,
"learning_rate": 1.8439668551737968e-05,
"loss": 0.6901,
"step": 853
},
{
"epoch": 0.7550839964633068,
"grad_norm": 0.26609930396080017,
"learning_rate": 1.8435903331679426e-05,
"loss": 0.6861,
"step": 854
},
{
"epoch": 0.7559681697612732,
"grad_norm": 0.2627010643482208,
"learning_rate": 1.843213395956544e-05,
"loss": 0.6774,
"step": 855
},
{
"epoch": 0.7568523430592397,
"grad_norm": 0.26876378059387207,
"learning_rate": 1.8428360437251264e-05,
"loss": 0.6738,
"step": 856
},
{
"epoch": 0.757736516357206,
"grad_norm": 0.25511404871940613,
"learning_rate": 1.8424582766594176e-05,
"loss": 0.6721,
"step": 857
},
{
"epoch": 0.7586206896551724,
"grad_norm": 0.2764536142349243,
"learning_rate": 1.8420800949453503e-05,
"loss": 0.6789,
"step": 858
},
{
"epoch": 0.7595048629531388,
"grad_norm": 0.26369649171829224,
"learning_rate": 1.8417014987690613e-05,
"loss": 0.6804,
"step": 859
},
{
"epoch": 0.7603890362511052,
"grad_norm": 0.2718081772327423,
"learning_rate": 1.8413224883168912e-05,
"loss": 0.6485,
"step": 860
},
{
"epoch": 0.7612732095490716,
"grad_norm": 0.2816314101219177,
"learning_rate": 1.840943063775385e-05,
"loss": 0.6906,
"step": 861
},
{
"epoch": 0.762157382847038,
"grad_norm": 0.2648642659187317,
"learning_rate": 1.84056322533129e-05,
"loss": 0.6807,
"step": 862
},
{
"epoch": 0.7630415561450045,
"grad_norm": 0.2795509696006775,
"learning_rate": 1.8401829731715598e-05,
"loss": 0.6756,
"step": 863
},
{
"epoch": 0.7639257294429708,
"grad_norm": 0.2620922029018402,
"learning_rate": 1.8398023074833496e-05,
"loss": 0.6667,
"step": 864
},
{
"epoch": 0.7648099027409372,
"grad_norm": 0.2989812195301056,
"learning_rate": 1.8394212284540183e-05,
"loss": 0.6692,
"step": 865
},
{
"epoch": 0.7656940760389036,
"grad_norm": 0.2825232148170471,
"learning_rate": 1.8390397362711293e-05,
"loss": 0.6924,
"step": 866
},
{
"epoch": 0.76657824933687,
"grad_norm": 0.2771415114402771,
"learning_rate": 1.8386578311224483e-05,
"loss": 0.676,
"step": 867
},
{
"epoch": 0.7674624226348364,
"grad_norm": 0.30649229884147644,
"learning_rate": 1.838275513195945e-05,
"loss": 0.6969,
"step": 868
},
{
"epoch": 0.7683465959328029,
"grad_norm": 0.27024170756340027,
"learning_rate": 1.8378927826797915e-05,
"loss": 0.6984,
"step": 869
},
{
"epoch": 0.7692307692307693,
"grad_norm": 0.2719253897666931,
"learning_rate": 1.8375096397623637e-05,
"loss": 0.6749,
"step": 870
},
{
"epoch": 0.7701149425287356,
"grad_norm": 0.2832467555999756,
"learning_rate": 1.83712608463224e-05,
"loss": 0.7317,
"step": 871
},
{
"epoch": 0.770999115826702,
"grad_norm": 0.28128424286842346,
"learning_rate": 1.836742117478202e-05,
"loss": 0.6907,
"step": 872
},
{
"epoch": 0.7718832891246684,
"grad_norm": 0.2697291374206543,
"learning_rate": 1.836357738489234e-05,
"loss": 0.6919,
"step": 873
},
{
"epoch": 0.7727674624226348,
"grad_norm": 0.27289918065071106,
"learning_rate": 1.8359729478545225e-05,
"loss": 0.6649,
"step": 874
},
{
"epoch": 0.7736516357206012,
"grad_norm": 0.26393064856529236,
"learning_rate": 1.8355877457634576e-05,
"loss": 0.6772,
"step": 875
},
{
"epoch": 0.7745358090185677,
"grad_norm": 0.285037100315094,
"learning_rate": 1.8352021324056314e-05,
"loss": 0.6765,
"step": 876
},
{
"epoch": 0.7754199823165341,
"grad_norm": 0.2858329117298126,
"learning_rate": 1.834816107970838e-05,
"loss": 0.668,
"step": 877
},
{
"epoch": 0.7763041556145004,
"grad_norm": 0.30717769265174866,
"learning_rate": 1.8344296726490746e-05,
"loss": 0.6838,
"step": 878
},
{
"epoch": 0.7771883289124668,
"grad_norm": 0.2694529592990875,
"learning_rate": 1.8340428266305398e-05,
"loss": 0.6827,
"step": 879
},
{
"epoch": 0.7780725022104332,
"grad_norm": 0.2850857675075531,
"learning_rate": 1.833655570105635e-05,
"loss": 0.6624,
"step": 880
},
{
"epoch": 0.7789566755083996,
"grad_norm": 0.26817429065704346,
"learning_rate": 1.833267903264964e-05,
"loss": 0.6437,
"step": 881
},
{
"epoch": 0.7798408488063661,
"grad_norm": 0.27654963731765747,
"learning_rate": 1.8328798262993313e-05,
"loss": 0.6675,
"step": 882
},
{
"epoch": 0.7807250221043325,
"grad_norm": 0.27001437544822693,
"learning_rate": 1.832491339399744e-05,
"loss": 0.6472,
"step": 883
},
{
"epoch": 0.7816091954022989,
"grad_norm": 0.27787089347839355,
"learning_rate": 1.8321024427574112e-05,
"loss": 0.677,
"step": 884
},
{
"epoch": 0.7824933687002652,
"grad_norm": 0.26041993498802185,
"learning_rate": 1.8317131365637434e-05,
"loss": 0.6822,
"step": 885
},
{
"epoch": 0.7833775419982316,
"grad_norm": 0.2952263355255127,
"learning_rate": 1.8313234210103527e-05,
"loss": 0.6839,
"step": 886
},
{
"epoch": 0.784261715296198,
"grad_norm": 0.27763888239860535,
"learning_rate": 1.830933296289052e-05,
"loss": 0.7105,
"step": 887
},
{
"epoch": 0.7851458885941645,
"grad_norm": 0.2820242643356323,
"learning_rate": 1.8305427625918574e-05,
"loss": 0.6859,
"step": 888
},
{
"epoch": 0.7860300618921309,
"grad_norm": 0.2836971879005432,
"learning_rate": 1.830151820110984e-05,
"loss": 0.6837,
"step": 889
},
{
"epoch": 0.7869142351900973,
"grad_norm": 0.28041893243789673,
"learning_rate": 1.8297604690388498e-05,
"loss": 0.6894,
"step": 890
},
{
"epoch": 0.7877984084880637,
"grad_norm": 0.2706386148929596,
"learning_rate": 1.8293687095680735e-05,
"loss": 0.6655,
"step": 891
},
{
"epoch": 0.78868258178603,
"grad_norm": 0.28449690341949463,
"learning_rate": 1.8289765418914743e-05,
"loss": 0.6548,
"step": 892
},
{
"epoch": 0.7895667550839964,
"grad_norm": 0.2735328674316406,
"learning_rate": 1.8285839662020725e-05,
"loss": 0.6648,
"step": 893
},
{
"epoch": 0.7904509283819628,
"grad_norm": 0.28461381793022156,
"learning_rate": 1.82819098269309e-05,
"loss": 0.6925,
"step": 894
},
{
"epoch": 0.7913351016799293,
"grad_norm": 0.28144946694374084,
"learning_rate": 1.827797591557948e-05,
"loss": 0.7078,
"step": 895
},
{
"epoch": 0.7922192749778957,
"grad_norm": 0.29551348090171814,
"learning_rate": 1.82740379299027e-05,
"loss": 0.6704,
"step": 896
},
{
"epoch": 0.7931034482758621,
"grad_norm": 0.27279067039489746,
"learning_rate": 1.8270095871838788e-05,
"loss": 0.6997,
"step": 897
},
{
"epoch": 0.7939876215738285,
"grad_norm": 0.29390403628349304,
"learning_rate": 1.8266149743327978e-05,
"loss": 0.6654,
"step": 898
},
{
"epoch": 0.7948717948717948,
"grad_norm": 0.2852860391139984,
"learning_rate": 1.826219954631251e-05,
"loss": 0.6852,
"step": 899
},
{
"epoch": 0.7957559681697612,
"grad_norm": 0.3078218400478363,
"learning_rate": 1.8258245282736627e-05,
"loss": 0.6926,
"step": 900
},
{
"epoch": 0.7966401414677277,
"grad_norm": 0.30486321449279785,
"learning_rate": 1.8254286954546572e-05,
"loss": 0.6773,
"step": 901
},
{
"epoch": 0.7975243147656941,
"grad_norm": 0.28244778513908386,
"learning_rate": 1.825032456369059e-05,
"loss": 0.6892,
"step": 902
},
{
"epoch": 0.7984084880636605,
"grad_norm": 0.32668113708496094,
"learning_rate": 1.8246358112118923e-05,
"loss": 0.6744,
"step": 903
},
{
"epoch": 0.7992926613616269,
"grad_norm": 0.2697162628173828,
"learning_rate": 1.8242387601783815e-05,
"loss": 0.6908,
"step": 904
},
{
"epoch": 0.8001768346595933,
"grad_norm": 0.31500083208084106,
"learning_rate": 1.8238413034639504e-05,
"loss": 0.6733,
"step": 905
},
{
"epoch": 0.8010610079575596,
"grad_norm": 0.2815505564212799,
"learning_rate": 1.8234434412642226e-05,
"loss": 0.6583,
"step": 906
},
{
"epoch": 0.801945181255526,
"grad_norm": 0.2784971296787262,
"learning_rate": 1.823045173775022e-05,
"loss": 0.6932,
"step": 907
},
{
"epoch": 0.8028293545534925,
"grad_norm": 0.2708076536655426,
"learning_rate": 1.8226465011923707e-05,
"loss": 0.6866,
"step": 908
},
{
"epoch": 0.8037135278514589,
"grad_norm": 0.2688702642917633,
"learning_rate": 1.8222474237124904e-05,
"loss": 0.6613,
"step": 909
},
{
"epoch": 0.8045977011494253,
"grad_norm": 0.2704801857471466,
"learning_rate": 1.821847941531804e-05,
"loss": 0.6852,
"step": 910
},
{
"epoch": 0.8054818744473917,
"grad_norm": 0.2773894667625427,
"learning_rate": 1.82144805484693e-05,
"loss": 0.7095,
"step": 911
},
{
"epoch": 0.8063660477453581,
"grad_norm": 0.2783767580986023,
"learning_rate": 1.82104776385469e-05,
"loss": 0.6765,
"step": 912
},
{
"epoch": 0.8072502210433244,
"grad_norm": 0.27740514278411865,
"learning_rate": 1.820647068752101e-05,
"loss": 0.6635,
"step": 913
},
{
"epoch": 0.8081343943412909,
"grad_norm": 0.2748161554336548,
"learning_rate": 1.820245969736382e-05,
"loss": 0.6599,
"step": 914
},
{
"epoch": 0.8090185676392573,
"grad_norm": 0.27055516839027405,
"learning_rate": 1.8198444670049488e-05,
"loss": 0.6928,
"step": 915
},
{
"epoch": 0.8099027409372237,
"grad_norm": 0.26117920875549316,
"learning_rate": 1.819442560755416e-05,
"loss": 0.6817,
"step": 916
},
{
"epoch": 0.8107869142351901,
"grad_norm": 0.2692125737667084,
"learning_rate": 1.819040251185598e-05,
"loss": 0.6849,
"step": 917
},
{
"epoch": 0.8116710875331565,
"grad_norm": 0.2597946524620056,
"learning_rate": 1.8186375384935062e-05,
"loss": 0.6632,
"step": 918
},
{
"epoch": 0.812555260831123,
"grad_norm": 0.2677350342273712,
"learning_rate": 1.818234422877352e-05,
"loss": 0.7183,
"step": 919
},
{
"epoch": 0.8134394341290893,
"grad_norm": 0.25201910734176636,
"learning_rate": 1.817830904535544e-05,
"loss": 0.6617,
"step": 920
},
{
"epoch": 0.8143236074270557,
"grad_norm": 0.2762417793273926,
"learning_rate": 1.8174269836666892e-05,
"loss": 0.6748,
"step": 921
},
{
"epoch": 0.8152077807250221,
"grad_norm": 0.2679734230041504,
"learning_rate": 1.817022660469593e-05,
"loss": 0.6963,
"step": 922
},
{
"epoch": 0.8160919540229885,
"grad_norm": 0.2793523073196411,
"learning_rate": 1.8166179351432586e-05,
"loss": 0.6806,
"step": 923
},
{
"epoch": 0.8169761273209549,
"grad_norm": 0.2657225430011749,
"learning_rate": 1.816212807886887e-05,
"loss": 0.6749,
"step": 924
},
{
"epoch": 0.8178603006189213,
"grad_norm": 0.2591228485107422,
"learning_rate": 1.8158072788998774e-05,
"loss": 0.6632,
"step": 925
},
{
"epoch": 0.8187444739168878,
"grad_norm": 0.2631901204586029,
"learning_rate": 1.8154013483818265e-05,
"loss": 0.6776,
"step": 926
},
{
"epoch": 0.8196286472148541,
"grad_norm": 0.27424156665802,
"learning_rate": 1.8149950165325294e-05,
"loss": 0.6733,
"step": 927
},
{
"epoch": 0.8205128205128205,
"grad_norm": 0.2525295615196228,
"learning_rate": 1.8145882835519766e-05,
"loss": 0.6495,
"step": 928
},
{
"epoch": 0.8213969938107869,
"grad_norm": 0.2610355615615845,
"learning_rate": 1.8141811496403584e-05,
"loss": 0.657,
"step": 929
},
{
"epoch": 0.8222811671087533,
"grad_norm": 0.27828148007392883,
"learning_rate": 1.8137736149980613e-05,
"loss": 0.6676,
"step": 930
},
{
"epoch": 0.8231653404067197,
"grad_norm": 0.27071613073349,
"learning_rate": 1.8133656798256692e-05,
"loss": 0.6565,
"step": 931
},
{
"epoch": 0.8240495137046862,
"grad_norm": 0.26698359847068787,
"learning_rate": 1.8129573443239632e-05,
"loss": 0.6656,
"step": 932
},
{
"epoch": 0.8249336870026526,
"grad_norm": 0.27760380506515503,
"learning_rate": 1.812548608693921e-05,
"loss": 0.6939,
"step": 933
},
{
"epoch": 0.8258178603006189,
"grad_norm": 0.2623103857040405,
"learning_rate": 1.812139473136718e-05,
"loss": 0.6482,
"step": 934
},
{
"epoch": 0.8267020335985853,
"grad_norm": 0.26979145407676697,
"learning_rate": 1.811729937853726e-05,
"loss": 0.6627,
"step": 935
},
{
"epoch": 0.8275862068965517,
"grad_norm": 0.27716580033302307,
"learning_rate": 1.8113200030465134e-05,
"loss": 0.6381,
"step": 936
},
{
"epoch": 0.8284703801945181,
"grad_norm": 0.27702417969703674,
"learning_rate": 1.8109096689168454e-05,
"loss": 0.6731,
"step": 937
},
{
"epoch": 0.8293545534924845,
"grad_norm": 0.25983765721321106,
"learning_rate": 1.810498935666684e-05,
"loss": 0.6654,
"step": 938
},
{
"epoch": 0.830238726790451,
"grad_norm": 0.273832231760025,
"learning_rate": 1.8100878034981876e-05,
"loss": 0.6994,
"step": 939
},
{
"epoch": 0.8311229000884174,
"grad_norm": 0.2666497826576233,
"learning_rate": 1.8096762726137106e-05,
"loss": 0.6533,
"step": 940
},
{
"epoch": 0.8320070733863837,
"grad_norm": 0.2834012508392334,
"learning_rate": 1.8092643432158037e-05,
"loss": 0.7002,
"step": 941
},
{
"epoch": 0.8328912466843501,
"grad_norm": 0.28017157316207886,
"learning_rate": 1.8088520155072135e-05,
"loss": 0.6819,
"step": 942
},
{
"epoch": 0.8337754199823165,
"grad_norm": 0.26762130856513977,
"learning_rate": 1.8084392896908836e-05,
"loss": 0.685,
"step": 943
},
{
"epoch": 0.8346595932802829,
"grad_norm": 0.2715948224067688,
"learning_rate": 1.808026165969953e-05,
"loss": 0.6575,
"step": 944
},
{
"epoch": 0.8355437665782494,
"grad_norm": 0.2740465998649597,
"learning_rate": 1.8076126445477564e-05,
"loss": 0.6627,
"step": 945
},
{
"epoch": 0.8364279398762158,
"grad_norm": 0.28308868408203125,
"learning_rate": 1.8071987256278235e-05,
"loss": 0.6755,
"step": 946
},
{
"epoch": 0.8373121131741822,
"grad_norm": 0.30482539534568787,
"learning_rate": 1.8067844094138815e-05,
"loss": 0.6503,
"step": 947
},
{
"epoch": 0.8381962864721485,
"grad_norm": 0.2840719521045685,
"learning_rate": 1.8063696961098517e-05,
"loss": 0.6524,
"step": 948
},
{
"epoch": 0.8390804597701149,
"grad_norm": 0.269832968711853,
"learning_rate": 1.8059545859198514e-05,
"loss": 0.6904,
"step": 949
},
{
"epoch": 0.8399646330680813,
"grad_norm": 0.2816389799118042,
"learning_rate": 1.8055390790481927e-05,
"loss": 0.6854,
"step": 950
},
{
"epoch": 0.8408488063660478,
"grad_norm": 0.28061866760253906,
"learning_rate": 1.805123175699384e-05,
"loss": 0.6985,
"step": 951
},
{
"epoch": 0.8417329796640142,
"grad_norm": 0.2712833285331726,
"learning_rate": 1.8047068760781278e-05,
"loss": 0.673,
"step": 952
},
{
"epoch": 0.8426171529619806,
"grad_norm": 0.27095815539360046,
"learning_rate": 1.804290180389322e-05,
"loss": 0.6755,
"step": 953
},
{
"epoch": 0.843501326259947,
"grad_norm": 0.2653496265411377,
"learning_rate": 1.8038730888380592e-05,
"loss": 0.6748,
"step": 954
},
{
"epoch": 0.8443854995579133,
"grad_norm": 0.261883944272995,
"learning_rate": 1.803455601629628e-05,
"loss": 0.6547,
"step": 955
},
{
"epoch": 0.8452696728558797,
"grad_norm": 0.2585887610912323,
"learning_rate": 1.80303771896951e-05,
"loss": 0.6675,
"step": 956
},
{
"epoch": 0.8461538461538461,
"grad_norm": 0.2751072347164154,
"learning_rate": 1.8026194410633825e-05,
"loss": 0.6637,
"step": 957
},
{
"epoch": 0.8470380194518126,
"grad_norm": 0.2901028096675873,
"learning_rate": 1.802200768117117e-05,
"loss": 0.6849,
"step": 958
},
{
"epoch": 0.847922192749779,
"grad_norm": 0.27229058742523193,
"learning_rate": 1.80178170033678e-05,
"loss": 0.6709,
"step": 959
},
{
"epoch": 0.8488063660477454,
"grad_norm": 0.2877761423587799,
"learning_rate": 1.8013622379286317e-05,
"loss": 0.7114,
"step": 960
},
{
"epoch": 0.8496905393457118,
"grad_norm": 0.27011096477508545,
"learning_rate": 1.8009423810991265e-05,
"loss": 0.671,
"step": 961
},
{
"epoch": 0.8505747126436781,
"grad_norm": 0.2779689133167267,
"learning_rate": 1.8005221300549132e-05,
"loss": 0.6464,
"step": 962
},
{
"epoch": 0.8514588859416445,
"grad_norm": 0.26694098114967346,
"learning_rate": 1.8001014850028348e-05,
"loss": 0.6513,
"step": 963
},
{
"epoch": 0.852343059239611,
"grad_norm": 0.2892657518386841,
"learning_rate": 1.799680446149928e-05,
"loss": 0.6747,
"step": 964
},
{
"epoch": 0.8532272325375774,
"grad_norm": 0.28105780482292175,
"learning_rate": 1.7992590137034227e-05,
"loss": 0.6959,
"step": 965
},
{
"epoch": 0.8541114058355438,
"grad_norm": 0.30166932940483093,
"learning_rate": 1.798837187870744e-05,
"loss": 0.6939,
"step": 966
},
{
"epoch": 0.8549955791335102,
"grad_norm": 0.2654246985912323,
"learning_rate": 1.7984149688595092e-05,
"loss": 0.6909,
"step": 967
},
{
"epoch": 0.8558797524314765,
"grad_norm": 0.2952018976211548,
"learning_rate": 1.7979923568775296e-05,
"loss": 0.6854,
"step": 968
},
{
"epoch": 0.8567639257294429,
"grad_norm": 0.2771957814693451,
"learning_rate": 1.7975693521328107e-05,
"loss": 0.6813,
"step": 969
},
{
"epoch": 0.8576480990274093,
"grad_norm": 0.2915500998497009,
"learning_rate": 1.7971459548335503e-05,
"loss": 0.6749,
"step": 970
},
{
"epoch": 0.8585322723253758,
"grad_norm": 0.26830586791038513,
"learning_rate": 1.796722165188139e-05,
"loss": 0.685,
"step": 971
},
{
"epoch": 0.8594164456233422,
"grad_norm": 0.2885337471961975,
"learning_rate": 1.7962979834051625e-05,
"loss": 0.6793,
"step": 972
},
{
"epoch": 0.8603006189213086,
"grad_norm": 0.2905611991882324,
"learning_rate": 1.795873409693397e-05,
"loss": 0.6699,
"step": 973
},
{
"epoch": 0.861184792219275,
"grad_norm": 0.2650064527988434,
"learning_rate": 1.7954484442618136e-05,
"loss": 0.6675,
"step": 974
},
{
"epoch": 0.8620689655172413,
"grad_norm": 0.31721997261047363,
"learning_rate": 1.7950230873195754e-05,
"loss": 0.6781,
"step": 975
},
{
"epoch": 0.8629531388152077,
"grad_norm": 0.26145753264427185,
"learning_rate": 1.7945973390760378e-05,
"loss": 0.6733,
"step": 976
},
{
"epoch": 0.8638373121131742,
"grad_norm": 0.3047639727592468,
"learning_rate": 1.7941711997407497e-05,
"loss": 0.7058,
"step": 977
},
{
"epoch": 0.8647214854111406,
"grad_norm": 0.2580280303955078,
"learning_rate": 1.7937446695234517e-05,
"loss": 0.665,
"step": 978
},
{
"epoch": 0.865605658709107,
"grad_norm": 0.29127317667007446,
"learning_rate": 1.7933177486340775e-05,
"loss": 0.6507,
"step": 979
},
{
"epoch": 0.8664898320070734,
"grad_norm": 0.2819880247116089,
"learning_rate": 1.7928904372827523e-05,
"loss": 0.6658,
"step": 980
},
{
"epoch": 0.8673740053050398,
"grad_norm": 0.3024705946445465,
"learning_rate": 1.792462735679794e-05,
"loss": 0.6607,
"step": 981
},
{
"epoch": 0.8682581786030061,
"grad_norm": 0.25877004861831665,
"learning_rate": 1.7920346440357123e-05,
"loss": 0.6671,
"step": 982
},
{
"epoch": 0.8691423519009726,
"grad_norm": 0.2850501239299774,
"learning_rate": 1.7916061625612093e-05,
"loss": 0.6767,
"step": 983
},
{
"epoch": 0.870026525198939,
"grad_norm": 0.27729132771492004,
"learning_rate": 1.7911772914671788e-05,
"loss": 0.6509,
"step": 984
},
{
"epoch": 0.8709106984969054,
"grad_norm": 0.2668955624103546,
"learning_rate": 1.790748030964706e-05,
"loss": 0.6673,
"step": 985
},
{
"epoch": 0.8717948717948718,
"grad_norm": 0.26832088828086853,
"learning_rate": 1.790318381265068e-05,
"loss": 0.6752,
"step": 986
},
{
"epoch": 0.8726790450928382,
"grad_norm": 0.2544272840023041,
"learning_rate": 1.7898883425797333e-05,
"loss": 0.6715,
"step": 987
},
{
"epoch": 0.8735632183908046,
"grad_norm": 0.2611301839351654,
"learning_rate": 1.7894579151203623e-05,
"loss": 0.667,
"step": 988
},
{
"epoch": 0.874447391688771,
"grad_norm": 0.25763335824012756,
"learning_rate": 1.789027099098807e-05,
"loss": 0.6538,
"step": 989
},
{
"epoch": 0.8753315649867374,
"grad_norm": 0.276043176651001,
"learning_rate": 1.7885958947271094e-05,
"loss": 0.6506,
"step": 990
},
{
"epoch": 0.8762157382847038,
"grad_norm": 0.2684342861175537,
"learning_rate": 1.7881643022175034e-05,
"loss": 0.6752,
"step": 991
},
{
"epoch": 0.8770999115826702,
"grad_norm": 0.2760465741157532,
"learning_rate": 1.7877323217824143e-05,
"loss": 0.6616,
"step": 992
},
{
"epoch": 0.8779840848806366,
"grad_norm": 0.2735520601272583,
"learning_rate": 1.7872999536344576e-05,
"loss": 0.6527,
"step": 993
},
{
"epoch": 0.878868258178603,
"grad_norm": 0.27460673451423645,
"learning_rate": 1.78686719798644e-05,
"loss": 0.6807,
"step": 994
},
{
"epoch": 0.8797524314765695,
"grad_norm": 0.27252066135406494,
"learning_rate": 1.7864340550513594e-05,
"loss": 0.676,
"step": 995
},
{
"epoch": 0.8806366047745358,
"grad_norm": 0.25362223386764526,
"learning_rate": 1.7860005250424036e-05,
"loss": 0.6546,
"step": 996
},
{
"epoch": 0.8815207780725022,
"grad_norm": 0.2798359990119934,
"learning_rate": 1.7855666081729507e-05,
"loss": 0.6951,
"step": 997
},
{
"epoch": 0.8824049513704686,
"grad_norm": 0.2605760097503662,
"learning_rate": 1.78513230465657e-05,
"loss": 0.6718,
"step": 998
},
{
"epoch": 0.883289124668435,
"grad_norm": 0.25717878341674805,
"learning_rate": 1.78469761470702e-05,
"loss": 0.6745,
"step": 999
},
{
"epoch": 0.8841732979664014,
"grad_norm": 0.2649955451488495,
"learning_rate": 1.7842625385382514e-05,
"loss": 0.6969,
"step": 1000
},
{
"epoch": 0.8841732979664014,
"eval_loss": 0.5903281569480896,
"eval_runtime": 636.4744,
"eval_samples_per_second": 9.477,
"eval_steps_per_second": 1.185,
"step": 1000
},
{
"epoch": 0.8850574712643678,
"grad_norm": 0.25965937972068787,
"learning_rate": 1.783827076364403e-05,
"loss": 0.6798,
"step": 1001
},
{
"epoch": 0.8859416445623343,
"grad_norm": 0.26029160618782043,
"learning_rate": 1.7833912283998046e-05,
"loss": 0.6866,
"step": 1002
},
{
"epoch": 0.8868258178603006,
"grad_norm": 0.2580418288707733,
"learning_rate": 1.782954994858975e-05,
"loss": 0.6871,
"step": 1003
},
{
"epoch": 0.887709991158267,
"grad_norm": 0.25958773493766785,
"learning_rate": 1.782518375956624e-05,
"loss": 0.6873,
"step": 1004
},
{
"epoch": 0.8885941644562334,
"grad_norm": 0.25157105922698975,
"learning_rate": 1.7820813719076504e-05,
"loss": 0.6834,
"step": 1005
},
{
"epoch": 0.8894783377541998,
"grad_norm": 0.27329814434051514,
"learning_rate": 1.7816439829271423e-05,
"loss": 0.6572,
"step": 1006
},
{
"epoch": 0.8903625110521662,
"grad_norm": 0.27335214614868164,
"learning_rate": 1.7812062092303777e-05,
"loss": 0.6745,
"step": 1007
},
{
"epoch": 0.8912466843501327,
"grad_norm": 0.26897189021110535,
"learning_rate": 1.7807680510328238e-05,
"loss": 0.6821,
"step": 1008
},
{
"epoch": 0.8921308576480991,
"grad_norm": 0.26581960916519165,
"learning_rate": 1.780329508550137e-05,
"loss": 0.676,
"step": 1009
},
{
"epoch": 0.8930150309460654,
"grad_norm": 0.25833389163017273,
"learning_rate": 1.7798905819981636e-05,
"loss": 0.6517,
"step": 1010
},
{
"epoch": 0.8938992042440318,
"grad_norm": 0.25903475284576416,
"learning_rate": 1.779451271592937e-05,
"loss": 0.6893,
"step": 1011
},
{
"epoch": 0.8947833775419982,
"grad_norm": 0.2810676395893097,
"learning_rate": 1.7790115775506817e-05,
"loss": 0.6711,
"step": 1012
},
{
"epoch": 0.8956675508399646,
"grad_norm": 0.28034377098083496,
"learning_rate": 1.7785715000878098e-05,
"loss": 0.6519,
"step": 1013
},
{
"epoch": 0.896551724137931,
"grad_norm": 0.2632770538330078,
"learning_rate": 1.7781310394209224e-05,
"loss": 0.678,
"step": 1014
},
{
"epoch": 0.8974358974358975,
"grad_norm": 0.2921956479549408,
"learning_rate": 1.7776901957668097e-05,
"loss": 0.6862,
"step": 1015
},
{
"epoch": 0.8983200707338639,
"grad_norm": 0.2779308557510376,
"learning_rate": 1.777248969342449e-05,
"loss": 0.6623,
"step": 1016
},
{
"epoch": 0.8992042440318302,
"grad_norm": 0.288036048412323,
"learning_rate": 1.7768073603650074e-05,
"loss": 0.6797,
"step": 1017
},
{
"epoch": 0.9000884173297966,
"grad_norm": 0.28232020139694214,
"learning_rate": 1.77636536905184e-05,
"loss": 0.679,
"step": 1018
},
{
"epoch": 0.900972590627763,
"grad_norm": 0.27721941471099854,
"learning_rate": 1.7759229956204896e-05,
"loss": 0.6764,
"step": 1019
},
{
"epoch": 0.9018567639257294,
"grad_norm": 0.2887219190597534,
"learning_rate": 1.7754802402886878e-05,
"loss": 0.6841,
"step": 1020
},
{
"epoch": 0.9027409372236959,
"grad_norm": 0.266539603471756,
"learning_rate": 1.775037103274353e-05,
"loss": 0.6839,
"step": 1021
},
{
"epoch": 0.9036251105216623,
"grad_norm": 0.3014654219150543,
"learning_rate": 1.7745935847955923e-05,
"loss": 0.6769,
"step": 1022
},
{
"epoch": 0.9045092838196287,
"grad_norm": 0.26665180921554565,
"learning_rate": 1.7741496850707012e-05,
"loss": 0.6366,
"step": 1023
},
{
"epoch": 0.905393457117595,
"grad_norm": 0.2872043550014496,
"learning_rate": 1.7737054043181615e-05,
"loss": 0.6566,
"step": 1024
},
{
"epoch": 0.9062776304155614,
"grad_norm": 0.26761943101882935,
"learning_rate": 1.773260742756643e-05,
"loss": 0.6747,
"step": 1025
},
{
"epoch": 0.9071618037135278,
"grad_norm": 0.29005324840545654,
"learning_rate": 1.7728157006050034e-05,
"loss": 0.6744,
"step": 1026
},
{
"epoch": 0.9080459770114943,
"grad_norm": 0.2781924307346344,
"learning_rate": 1.7723702780822872e-05,
"loss": 0.6725,
"step": 1027
},
{
"epoch": 0.9089301503094607,
"grad_norm": 0.2801373302936554,
"learning_rate": 1.771924475407726e-05,
"loss": 0.6776,
"step": 1028
},
{
"epoch": 0.9098143236074271,
"grad_norm": 0.295177698135376,
"learning_rate": 1.7714782928007396e-05,
"loss": 0.67,
"step": 1029
},
{
"epoch": 0.9106984969053935,
"grad_norm": 0.2730737328529358,
"learning_rate": 1.771031730480933e-05,
"loss": 0.6671,
"step": 1030
},
{
"epoch": 0.9115826702033598,
"grad_norm": 0.2801671624183655,
"learning_rate": 1.7705847886681e-05,
"loss": 0.6843,
"step": 1031
},
{
"epoch": 0.9124668435013262,
"grad_norm": 0.30406612157821655,
"learning_rate": 1.77013746758222e-05,
"loss": 0.6879,
"step": 1032
},
{
"epoch": 0.9133510167992926,
"grad_norm": 0.25604161620140076,
"learning_rate": 1.7696897674434587e-05,
"loss": 0.6706,
"step": 1033
},
{
"epoch": 0.9142351900972591,
"grad_norm": 0.2719627618789673,
"learning_rate": 1.7692416884721696e-05,
"loss": 0.6593,
"step": 1034
},
{
"epoch": 0.9151193633952255,
"grad_norm": 0.25450173020362854,
"learning_rate": 1.768793230888892e-05,
"loss": 0.6486,
"step": 1035
},
{
"epoch": 0.9160035366931919,
"grad_norm": 0.2738204002380371,
"learning_rate": 1.7683443949143516e-05,
"loss": 0.6674,
"step": 1036
},
{
"epoch": 0.9168877099911583,
"grad_norm": 0.2541281282901764,
"learning_rate": 1.7678951807694603e-05,
"loss": 0.6572,
"step": 1037
},
{
"epoch": 0.9177718832891246,
"grad_norm": 0.25642985105514526,
"learning_rate": 1.767445588675316e-05,
"loss": 0.6659,
"step": 1038
},
{
"epoch": 0.918656056587091,
"grad_norm": 0.2713623344898224,
"learning_rate": 1.7669956188532033e-05,
"loss": 0.6557,
"step": 1039
},
{
"epoch": 0.9195402298850575,
"grad_norm": 0.27054524421691895,
"learning_rate": 1.7665452715245918e-05,
"loss": 0.6694,
"step": 1040
},
{
"epoch": 0.9204244031830239,
"grad_norm": 0.26055777072906494,
"learning_rate": 1.7660945469111375e-05,
"loss": 0.6609,
"step": 1041
},
{
"epoch": 0.9213085764809903,
"grad_norm": 0.2656017541885376,
"learning_rate": 1.7656434452346814e-05,
"loss": 0.6734,
"step": 1042
},
{
"epoch": 0.9221927497789567,
"grad_norm": 0.2734725773334503,
"learning_rate": 1.7651919667172516e-05,
"loss": 0.6653,
"step": 1043
},
{
"epoch": 0.9230769230769231,
"grad_norm": 0.2713887095451355,
"learning_rate": 1.76474011158106e-05,
"loss": 0.6738,
"step": 1044
},
{
"epoch": 0.9239610963748894,
"grad_norm": 0.2622493505477905,
"learning_rate": 1.7642878800485044e-05,
"loss": 0.6587,
"step": 1045
},
{
"epoch": 0.9248452696728559,
"grad_norm": 0.27088847756385803,
"learning_rate": 1.7638352723421684e-05,
"loss": 0.69,
"step": 1046
},
{
"epoch": 0.9257294429708223,
"grad_norm": 0.2664904296398163,
"learning_rate": 1.7633822886848203e-05,
"loss": 0.693,
"step": 1047
},
{
"epoch": 0.9266136162687887,
"grad_norm": 0.25941723585128784,
"learning_rate": 1.7629289292994135e-05,
"loss": 0.6407,
"step": 1048
},
{
"epoch": 0.9274977895667551,
"grad_norm": 0.2619355618953705,
"learning_rate": 1.762475194409086e-05,
"loss": 0.6853,
"step": 1049
},
{
"epoch": 0.9283819628647215,
"grad_norm": 0.2604518532752991,
"learning_rate": 1.7620210842371615e-05,
"loss": 0.6592,
"step": 1050
},
{
"epoch": 0.9292661361626879,
"grad_norm": 0.2798643410205841,
"learning_rate": 1.7615665990071475e-05,
"loss": 0.6582,
"step": 1051
},
{
"epoch": 0.9301503094606542,
"grad_norm": 0.2659445106983185,
"learning_rate": 1.761111738942736e-05,
"loss": 0.6651,
"step": 1052
},
{
"epoch": 0.9310344827586207,
"grad_norm": 0.2766883373260498,
"learning_rate": 1.760656504267805e-05,
"loss": 0.6689,
"step": 1053
},
{
"epoch": 0.9319186560565871,
"grad_norm": 0.2594150900840759,
"learning_rate": 1.7602008952064145e-05,
"loss": 0.686,
"step": 1054
},
{
"epoch": 0.9328028293545535,
"grad_norm": 0.2773495316505432,
"learning_rate": 1.7597449119828117e-05,
"loss": 0.6646,
"step": 1055
},
{
"epoch": 0.9336870026525199,
"grad_norm": 0.2599293887615204,
"learning_rate": 1.7592885548214246e-05,
"loss": 0.6568,
"step": 1056
},
{
"epoch": 0.9345711759504863,
"grad_norm": 0.3243758976459503,
"learning_rate": 1.7588318239468684e-05,
"loss": 0.6688,
"step": 1057
},
{
"epoch": 0.9354553492484527,
"grad_norm": 0.2688482999801636,
"learning_rate": 1.7583747195839402e-05,
"loss": 0.6956,
"step": 1058
},
{
"epoch": 0.9363395225464191,
"grad_norm": 0.2633262276649475,
"learning_rate": 1.7579172419576215e-05,
"loss": 0.6565,
"step": 1059
},
{
"epoch": 0.9372236958443855,
"grad_norm": 0.2557532489299774,
"learning_rate": 1.757459391293078e-05,
"loss": 0.6837,
"step": 1060
},
{
"epoch": 0.9381078691423519,
"grad_norm": 0.29793909192085266,
"learning_rate": 1.757001167815658e-05,
"loss": 0.6935,
"step": 1061
},
{
"epoch": 0.9389920424403183,
"grad_norm": 0.2790304720401764,
"learning_rate": 1.7565425717508943e-05,
"loss": 0.6561,
"step": 1062
},
{
"epoch": 0.9398762157382847,
"grad_norm": 0.27901366353034973,
"learning_rate": 1.7560836033245025e-05,
"loss": 0.6756,
"step": 1063
},
{
"epoch": 0.9407603890362511,
"grad_norm": 0.2834090292453766,
"learning_rate": 1.7556242627623816e-05,
"loss": 0.6729,
"step": 1064
},
{
"epoch": 0.9416445623342176,
"grad_norm": 0.2834383547306061,
"learning_rate": 1.7551645502906137e-05,
"loss": 0.6875,
"step": 1065
},
{
"epoch": 0.9425287356321839,
"grad_norm": 0.26382672786712646,
"learning_rate": 1.754704466135464e-05,
"loss": 0.6418,
"step": 1066
},
{
"epoch": 0.9434129089301503,
"grad_norm": 0.27543070912361145,
"learning_rate": 1.7542440105233814e-05,
"loss": 0.6899,
"step": 1067
},
{
"epoch": 0.9442970822281167,
"grad_norm": 0.28698647022247314,
"learning_rate": 1.753783183680996e-05,
"loss": 0.6715,
"step": 1068
},
{
"epoch": 0.9451812555260831,
"grad_norm": 0.28023576736450195,
"learning_rate": 1.7533219858351217e-05,
"loss": 0.6606,
"step": 1069
},
{
"epoch": 0.9460654288240495,
"grad_norm": 0.27260106801986694,
"learning_rate": 1.7528604172127552e-05,
"loss": 0.6804,
"step": 1070
},
{
"epoch": 0.946949602122016,
"grad_norm": 0.26769205927848816,
"learning_rate": 1.7523984780410753e-05,
"loss": 0.6555,
"step": 1071
},
{
"epoch": 0.9478337754199824,
"grad_norm": 0.26485905051231384,
"learning_rate": 1.7519361685474427e-05,
"loss": 0.6571,
"step": 1072
},
{
"epoch": 0.9487179487179487,
"grad_norm": 0.27315646409988403,
"learning_rate": 1.7514734889594008e-05,
"loss": 0.6948,
"step": 1073
},
{
"epoch": 0.9496021220159151,
"grad_norm": 0.2641676366329193,
"learning_rate": 1.7510104395046762e-05,
"loss": 0.6807,
"step": 1074
},
{
"epoch": 0.9504862953138815,
"grad_norm": 0.28239792585372925,
"learning_rate": 1.7505470204111757e-05,
"loss": 0.6637,
"step": 1075
},
{
"epoch": 0.9513704686118479,
"grad_norm": 0.2668689787387848,
"learning_rate": 1.750083231906989e-05,
"loss": 0.6746,
"step": 1076
},
{
"epoch": 0.9522546419098143,
"grad_norm": 0.2757757604122162,
"learning_rate": 1.7496190742203878e-05,
"loss": 0.6781,
"step": 1077
},
{
"epoch": 0.9531388152077808,
"grad_norm": 0.254616379737854,
"learning_rate": 1.749154547579825e-05,
"loss": 0.6561,
"step": 1078
},
{
"epoch": 0.9540229885057471,
"grad_norm": 0.2608535587787628,
"learning_rate": 1.748689652213935e-05,
"loss": 0.6693,
"step": 1079
},
{
"epoch": 0.9549071618037135,
"grad_norm": 0.25192496180534363,
"learning_rate": 1.7482243883515352e-05,
"loss": 0.6785,
"step": 1080
},
{
"epoch": 0.9557913351016799,
"grad_norm": 0.27883705496788025,
"learning_rate": 1.7477587562216216e-05,
"loss": 0.6826,
"step": 1081
},
{
"epoch": 0.9566755083996463,
"grad_norm": 0.24686013162136078,
"learning_rate": 1.747292756053374e-05,
"loss": 0.6581,
"step": 1082
},
{
"epoch": 0.9575596816976127,
"grad_norm": 0.2555716335773468,
"learning_rate": 1.7468263880761524e-05,
"loss": 0.6445,
"step": 1083
},
{
"epoch": 0.9584438549955792,
"grad_norm": 0.25589719414711,
"learning_rate": 1.7463596525194972e-05,
"loss": 0.6623,
"step": 1084
},
{
"epoch": 0.9593280282935456,
"grad_norm": 0.27019718289375305,
"learning_rate": 1.745892549613131e-05,
"loss": 0.687,
"step": 1085
},
{
"epoch": 0.9602122015915119,
"grad_norm": 0.2520875632762909,
"learning_rate": 1.7454250795869555e-05,
"loss": 0.6476,
"step": 1086
},
{
"epoch": 0.9610963748894783,
"grad_norm": 0.2655462622642517,
"learning_rate": 1.7449572426710553e-05,
"loss": 0.6602,
"step": 1087
},
{
"epoch": 0.9619805481874447,
"grad_norm": 0.2607521116733551,
"learning_rate": 1.744489039095694e-05,
"loss": 0.6757,
"step": 1088
},
{
"epoch": 0.9628647214854111,
"grad_norm": 0.27073025703430176,
"learning_rate": 1.7440204690913158e-05,
"loss": 0.6703,
"step": 1089
},
{
"epoch": 0.9637488947833776,
"grad_norm": 0.2549103796482086,
"learning_rate": 1.7435515328885455e-05,
"loss": 0.6825,
"step": 1090
},
{
"epoch": 0.964633068081344,
"grad_norm": 0.25703832507133484,
"learning_rate": 1.7430822307181885e-05,
"loss": 0.6763,
"step": 1091
},
{
"epoch": 0.9655172413793104,
"grad_norm": 0.2660679519176483,
"learning_rate": 1.7426125628112296e-05,
"loss": 0.6448,
"step": 1092
},
{
"epoch": 0.9664014146772767,
"grad_norm": 0.26405152678489685,
"learning_rate": 1.7421425293988345e-05,
"loss": 0.6655,
"step": 1093
},
{
"epoch": 0.9672855879752431,
"grad_norm": 0.2685000002384186,
"learning_rate": 1.7416721307123477e-05,
"loss": 0.6474,
"step": 1094
},
{
"epoch": 0.9681697612732095,
"grad_norm": 0.25884756445884705,
"learning_rate": 1.7412013669832944e-05,
"loss": 0.6488,
"step": 1095
},
{
"epoch": 0.969053934571176,
"grad_norm": 0.2860439419746399,
"learning_rate": 1.740730238443379e-05,
"loss": 0.6907,
"step": 1096
},
{
"epoch": 0.9699381078691424,
"grad_norm": 0.263041228055954,
"learning_rate": 1.7402587453244863e-05,
"loss": 0.6638,
"step": 1097
},
{
"epoch": 0.9708222811671088,
"grad_norm": 0.26806530356407166,
"learning_rate": 1.7397868878586782e-05,
"loss": 0.6472,
"step": 1098
},
{
"epoch": 0.9717064544650752,
"grad_norm": 0.2857159674167633,
"learning_rate": 1.739314666278199e-05,
"loss": 0.6857,
"step": 1099
},
{
"epoch": 0.9725906277630415,
"grad_norm": 0.25429031252861023,
"learning_rate": 1.738842080815471e-05,
"loss": 0.6523,
"step": 1100
},
{
"epoch": 0.9734748010610079,
"grad_norm": 0.27628904581069946,
"learning_rate": 1.738369131703094e-05,
"loss": 0.6567,
"step": 1101
},
{
"epoch": 0.9743589743589743,
"grad_norm": 0.25353360176086426,
"learning_rate": 1.7378958191738494e-05,
"loss": 0.6565,
"step": 1102
},
{
"epoch": 0.9752431476569408,
"grad_norm": 0.2707658112049103,
"learning_rate": 1.737422143460695e-05,
"loss": 0.6585,
"step": 1103
},
{
"epoch": 0.9761273209549072,
"grad_norm": 0.24392253160476685,
"learning_rate": 1.73694810479677e-05,
"loss": 0.6409,
"step": 1104
},
{
"epoch": 0.9770114942528736,
"grad_norm": 0.28220134973526,
"learning_rate": 1.7364737034153898e-05,
"loss": 0.6622,
"step": 1105
},
{
"epoch": 0.97789566755084,
"grad_norm": 0.2695649266242981,
"learning_rate": 1.73599893955005e-05,
"loss": 0.6944,
"step": 1106
},
{
"epoch": 0.9787798408488063,
"grad_norm": 0.2707299590110779,
"learning_rate": 1.7355238134344233e-05,
"loss": 0.6598,
"step": 1107
},
{
"epoch": 0.9796640141467727,
"grad_norm": 0.26879486441612244,
"learning_rate": 1.7350483253023615e-05,
"loss": 0.6911,
"step": 1108
},
{
"epoch": 0.9805481874447391,
"grad_norm": 0.2739331126213074,
"learning_rate": 1.7345724753878952e-05,
"loss": 0.6358,
"step": 1109
},
{
"epoch": 0.9814323607427056,
"grad_norm": 0.27410033345222473,
"learning_rate": 1.7340962639252318e-05,
"loss": 0.6833,
"step": 1110
},
{
"epoch": 0.982316534040672,
"grad_norm": 0.24812832474708557,
"learning_rate": 1.7336196911487564e-05,
"loss": 0.6434,
"step": 1111
},
{
"epoch": 0.9832007073386384,
"grad_norm": 0.26902830600738525,
"learning_rate": 1.733142757293034e-05,
"loss": 0.6643,
"step": 1112
},
{
"epoch": 0.9840848806366048,
"grad_norm": 0.2602077126502991,
"learning_rate": 1.732665462592805e-05,
"loss": 0.6857,
"step": 1113
},
{
"epoch": 0.9849690539345711,
"grad_norm": 0.26925286650657654,
"learning_rate": 1.7321878072829893e-05,
"loss": 0.6314,
"step": 1114
},
{
"epoch": 0.9858532272325375,
"grad_norm": 0.2681903839111328,
"learning_rate": 1.7317097915986822e-05,
"loss": 0.6449,
"step": 1115
},
{
"epoch": 0.986737400530504,
"grad_norm": 0.2796882688999176,
"learning_rate": 1.7312314157751587e-05,
"loss": 0.6912,
"step": 1116
},
{
"epoch": 0.9876215738284704,
"grad_norm": 0.2725301682949066,
"learning_rate": 1.7307526800478696e-05,
"loss": 0.6985,
"step": 1117
},
{
"epoch": 0.9885057471264368,
"grad_norm": 0.2532489001750946,
"learning_rate": 1.730273584652443e-05,
"loss": 0.6625,
"step": 1118
},
{
"epoch": 0.9893899204244032,
"grad_norm": 0.26952359080314636,
"learning_rate": 1.729794129824684e-05,
"loss": 0.6746,
"step": 1119
},
{
"epoch": 0.9902740937223696,
"grad_norm": 0.2754993140697479,
"learning_rate": 1.7293143158005753e-05,
"loss": 0.6685,
"step": 1120
},
{
"epoch": 0.9911582670203359,
"grad_norm": 0.2609037160873413,
"learning_rate": 1.7288341428162755e-05,
"loss": 0.6412,
"step": 1121
},
{
"epoch": 0.9920424403183024,
"grad_norm": 0.2644977569580078,
"learning_rate": 1.7283536111081205e-05,
"loss": 0.6785,
"step": 1122
},
{
"epoch": 0.9929266136162688,
"grad_norm": 0.278082013130188,
"learning_rate": 1.7278727209126226e-05,
"loss": 0.6701,
"step": 1123
},
{
"epoch": 0.9938107869142352,
"grad_norm": 0.25577571988105774,
"learning_rate": 1.72739147246647e-05,
"loss": 0.6589,
"step": 1124
},
{
"epoch": 0.9946949602122016,
"grad_norm": 0.28002941608428955,
"learning_rate": 1.726909866006528e-05,
"loss": 0.6563,
"step": 1125
},
{
"epoch": 0.995579133510168,
"grad_norm": 0.27407336235046387,
"learning_rate": 1.7264279017698383e-05,
"loss": 0.6782,
"step": 1126
},
{
"epoch": 0.9964633068081344,
"grad_norm": 0.27307671308517456,
"learning_rate": 1.725945579993617e-05,
"loss": 0.6895,
"step": 1127
},
{
"epoch": 0.9973474801061007,
"grad_norm": 0.274506151676178,
"learning_rate": 1.7254629009152586e-05,
"loss": 0.6529,
"step": 1128
},
{
"epoch": 0.9982316534040672,
"grad_norm": 0.2684597074985504,
"learning_rate": 1.7249798647723316e-05,
"loss": 0.6505,
"step": 1129
},
{
"epoch": 0.9991158267020336,
"grad_norm": 0.26910215616226196,
"learning_rate": 1.724496471802581e-05,
"loss": 0.6567,
"step": 1130
},
{
"epoch": 1.0,
"grad_norm": 0.25736430287361145,
"learning_rate": 1.7240127222439277e-05,
"loss": 0.6583,
"step": 1131
},
{
"epoch": 1.0008841732979663,
"grad_norm": 0.3731796145439148,
"learning_rate": 1.723528616334467e-05,
"loss": 0.6282,
"step": 1132
},
{
"epoch": 1.0017683465959328,
"grad_norm": 0.31906217336654663,
"learning_rate": 1.7230441543124704e-05,
"loss": 0.6119,
"step": 1133
},
{
"epoch": 1.0026525198938991,
"grad_norm": 0.3216983675956726,
"learning_rate": 1.722559336416385e-05,
"loss": 0.6273,
"step": 1134
},
{
"epoch": 1.0035366931918657,
"grad_norm": 0.3276985287666321,
"learning_rate": 1.7220741628848327e-05,
"loss": 0.6208,
"step": 1135
},
{
"epoch": 1.004420866489832,
"grad_norm": 0.3101070523262024,
"learning_rate": 1.72158863395661e-05,
"loss": 0.6378,
"step": 1136
},
{
"epoch": 1.0053050397877985,
"grad_norm": 0.30408746004104614,
"learning_rate": 1.721102749870689e-05,
"loss": 0.615,
"step": 1137
},
{
"epoch": 1.0061892130857648,
"grad_norm": 0.29603374004364014,
"learning_rate": 1.7206165108662163e-05,
"loss": 0.6541,
"step": 1138
},
{
"epoch": 1.0070733863837311,
"grad_norm": 0.3172050714492798,
"learning_rate": 1.7201299171825127e-05,
"loss": 0.6249,
"step": 1139
},
{
"epoch": 1.0079575596816976,
"grad_norm": 0.27763667702674866,
"learning_rate": 1.7196429690590745e-05,
"loss": 0.628,
"step": 1140
},
{
"epoch": 1.008841732979664,
"grad_norm": 0.2981289327144623,
"learning_rate": 1.719155666735572e-05,
"loss": 0.6249,
"step": 1141
},
{
"epoch": 1.0097259062776305,
"grad_norm": 0.27837640047073364,
"learning_rate": 1.7186680104518497e-05,
"loss": 0.6395,
"step": 1142
},
{
"epoch": 1.0106100795755968,
"grad_norm": 0.2705881893634796,
"learning_rate": 1.718180000447927e-05,
"loss": 0.6442,
"step": 1143
},
{
"epoch": 1.0114942528735633,
"grad_norm": 0.2696738541126251,
"learning_rate": 1.717691636963996e-05,
"loss": 0.6171,
"step": 1144
},
{
"epoch": 1.0123784261715296,
"grad_norm": 0.2598719000816345,
"learning_rate": 1.7172029202404242e-05,
"loss": 0.6418,
"step": 1145
},
{
"epoch": 1.013262599469496,
"grad_norm": 0.26264703273773193,
"learning_rate": 1.7167138505177522e-05,
"loss": 0.6474,
"step": 1146
},
{
"epoch": 1.0141467727674625,
"grad_norm": 0.2645900547504425,
"learning_rate": 1.7162244280366947e-05,
"loss": 0.6072,
"step": 1147
},
{
"epoch": 1.0150309460654288,
"grad_norm": 0.2668091952800751,
"learning_rate": 1.7157346530381394e-05,
"loss": 0.6205,
"step": 1148
},
{
"epoch": 1.0159151193633953,
"grad_norm": 0.2574772834777832,
"learning_rate": 1.7152445257631486e-05,
"loss": 0.6378,
"step": 1149
},
{
"epoch": 1.0167992926613616,
"grad_norm": 0.25622695684432983,
"learning_rate": 1.7147540464529573e-05,
"loss": 0.5999,
"step": 1150
},
{
"epoch": 1.0176834659593281,
"grad_norm": 0.2735171318054199,
"learning_rate": 1.714263215348973e-05,
"loss": 0.6315,
"step": 1151
},
{
"epoch": 1.0185676392572944,
"grad_norm": 0.2734070122241974,
"learning_rate": 1.7137720326927784e-05,
"loss": 0.6446,
"step": 1152
},
{
"epoch": 1.0194518125552607,
"grad_norm": 0.2729871869087219,
"learning_rate": 1.7132804987261268e-05,
"loss": 0.6359,
"step": 1153
},
{
"epoch": 1.0203359858532273,
"grad_norm": 0.265567809343338,
"learning_rate": 1.712788613690947e-05,
"loss": 0.6488,
"step": 1154
},
{
"epoch": 1.0212201591511936,
"grad_norm": 0.29482394456863403,
"learning_rate": 1.7122963778293375e-05,
"loss": 0.631,
"step": 1155
},
{
"epoch": 1.02210433244916,
"grad_norm": 0.2810860276222229,
"learning_rate": 1.7118037913835728e-05,
"loss": 0.6182,
"step": 1156
},
{
"epoch": 1.0229885057471264,
"grad_norm": 0.27755916118621826,
"learning_rate": 1.7113108545960974e-05,
"loss": 0.6413,
"step": 1157
},
{
"epoch": 1.023872679045093,
"grad_norm": 0.2944599986076355,
"learning_rate": 1.710817567709529e-05,
"loss": 0.6569,
"step": 1158
},
{
"epoch": 1.0247568523430592,
"grad_norm": 0.26788049936294556,
"learning_rate": 1.7103239309666585e-05,
"loss": 0.6425,
"step": 1159
},
{
"epoch": 1.0256410256410255,
"grad_norm": 0.3181128203868866,
"learning_rate": 1.709829944610448e-05,
"loss": 0.6483,
"step": 1160
},
{
"epoch": 1.026525198938992,
"grad_norm": 0.274090439081192,
"learning_rate": 1.709335608884032e-05,
"loss": 0.6616,
"step": 1161
},
{
"epoch": 1.0274093722369584,
"grad_norm": 0.31790080666542053,
"learning_rate": 1.7088409240307166e-05,
"loss": 0.6276,
"step": 1162
},
{
"epoch": 1.028293545534925,
"grad_norm": 0.2720988988876343,
"learning_rate": 1.70834589029398e-05,
"loss": 0.6417,
"step": 1163
},
{
"epoch": 1.0291777188328912,
"grad_norm": 0.2880362570285797,
"learning_rate": 1.7078505079174727e-05,
"loss": 0.6123,
"step": 1164
},
{
"epoch": 1.0300618921308577,
"grad_norm": 0.2741018235683441,
"learning_rate": 1.707354777145016e-05,
"loss": 0.605,
"step": 1165
},
{
"epoch": 1.030946065428824,
"grad_norm": 0.2825055420398712,
"learning_rate": 1.706858698220603e-05,
"loss": 0.6401,
"step": 1166
},
{
"epoch": 1.0318302387267904,
"grad_norm": 0.2906399071216583,
"learning_rate": 1.7063622713883976e-05,
"loss": 0.6511,
"step": 1167
},
{
"epoch": 1.032714412024757,
"grad_norm": 0.2705128490924835,
"learning_rate": 1.7058654968927362e-05,
"loss": 0.6161,
"step": 1168
},
{
"epoch": 1.0335985853227232,
"grad_norm": 0.267473965883255,
"learning_rate": 1.7053683749781248e-05,
"loss": 0.628,
"step": 1169
},
{
"epoch": 1.0344827586206897,
"grad_norm": 0.293056458234787,
"learning_rate": 1.7048709058892415e-05,
"loss": 0.6468,
"step": 1170
},
{
"epoch": 1.035366931918656,
"grad_norm": 0.2743586301803589,
"learning_rate": 1.7043730898709353e-05,
"loss": 0.6392,
"step": 1171
},
{
"epoch": 1.0362511052166226,
"grad_norm": 0.28495079278945923,
"learning_rate": 1.7038749271682247e-05,
"loss": 0.6089,
"step": 1172
},
{
"epoch": 1.0371352785145889,
"grad_norm": 0.27104368805885315,
"learning_rate": 1.703376418026301e-05,
"loss": 0.6468,
"step": 1173
},
{
"epoch": 1.0380194518125552,
"grad_norm": 0.2809831500053406,
"learning_rate": 1.7028775626905232e-05,
"loss": 0.6226,
"step": 1174
},
{
"epoch": 1.0389036251105217,
"grad_norm": 0.25932061672210693,
"learning_rate": 1.702378361406423e-05,
"loss": 0.618,
"step": 1175
},
{
"epoch": 1.039787798408488,
"grad_norm": 0.3152928352355957,
"learning_rate": 1.701878814419702e-05,
"loss": 0.5957,
"step": 1176
},
{
"epoch": 1.0406719717064545,
"grad_norm": 0.2770199179649353,
"learning_rate": 1.70137892197623e-05,
"loss": 0.6208,
"step": 1177
},
{
"epoch": 1.0415561450044208,
"grad_norm": 0.3086700439453125,
"learning_rate": 1.7008786843220503e-05,
"loss": 0.6152,
"step": 1178
},
{
"epoch": 1.0424403183023874,
"grad_norm": 0.2571791410446167,
"learning_rate": 1.7003781017033727e-05,
"loss": 0.6421,
"step": 1179
},
{
"epoch": 1.0433244916003537,
"grad_norm": 0.29180416464805603,
"learning_rate": 1.699877174366579e-05,
"loss": 0.6197,
"step": 1180
},
{
"epoch": 1.04420866489832,
"grad_norm": 0.26762959361076355,
"learning_rate": 1.69937590255822e-05,
"loss": 0.6305,
"step": 1181
},
{
"epoch": 1.0450928381962865,
"grad_norm": 0.2819135785102844,
"learning_rate": 1.6988742865250153e-05,
"loss": 0.6042,
"step": 1182
},
{
"epoch": 1.0459770114942528,
"grad_norm": 0.28463149070739746,
"learning_rate": 1.6983723265138552e-05,
"loss": 0.6315,
"step": 1183
},
{
"epoch": 1.0468611847922193,
"grad_norm": 0.28508734703063965,
"learning_rate": 1.6978700227717985e-05,
"loss": 0.635,
"step": 1184
},
{
"epoch": 1.0477453580901857,
"grad_norm": 0.2717869281768799,
"learning_rate": 1.697367375546073e-05,
"loss": 0.6095,
"step": 1185
},
{
"epoch": 1.0486295313881522,
"grad_norm": 0.2838262617588043,
"learning_rate": 1.6968643850840767e-05,
"loss": 0.6199,
"step": 1186
},
{
"epoch": 1.0495137046861185,
"grad_norm": 0.2623160183429718,
"learning_rate": 1.696361051633375e-05,
"loss": 0.6249,
"step": 1187
},
{
"epoch": 1.0503978779840848,
"grad_norm": 0.2898575961589813,
"learning_rate": 1.6958573754417037e-05,
"loss": 0.6224,
"step": 1188
},
{
"epoch": 1.0512820512820513,
"grad_norm": 0.2627337574958801,
"learning_rate": 1.695353356756966e-05,
"loss": 0.6104,
"step": 1189
},
{
"epoch": 1.0521662245800176,
"grad_norm": 0.2779299020767212,
"learning_rate": 1.694848995827234e-05,
"loss": 0.6219,
"step": 1190
},
{
"epoch": 1.0530503978779842,
"grad_norm": 0.28515782952308655,
"learning_rate": 1.6943442929007484e-05,
"loss": 0.6226,
"step": 1191
},
{
"epoch": 1.0539345711759505,
"grad_norm": 0.2683526575565338,
"learning_rate": 1.6938392482259186e-05,
"loss": 0.6382,
"step": 1192
},
{
"epoch": 1.054818744473917,
"grad_norm": 0.26224249601364136,
"learning_rate": 1.6933338620513216e-05,
"loss": 0.6166,
"step": 1193
},
{
"epoch": 1.0557029177718833,
"grad_norm": 0.27818483114242554,
"learning_rate": 1.6928281346257026e-05,
"loss": 0.6493,
"step": 1194
},
{
"epoch": 1.0565870910698496,
"grad_norm": 0.25754284858703613,
"learning_rate": 1.6923220661979754e-05,
"loss": 0.6196,
"step": 1195
},
{
"epoch": 1.0574712643678161,
"grad_norm": 0.2612115144729614,
"learning_rate": 1.6918156570172205e-05,
"loss": 0.6261,
"step": 1196
},
{
"epoch": 1.0583554376657824,
"grad_norm": 0.2732231318950653,
"learning_rate": 1.691308907332687e-05,
"loss": 0.6399,
"step": 1197
},
{
"epoch": 1.059239610963749,
"grad_norm": 0.26607832312583923,
"learning_rate": 1.690801817393791e-05,
"loss": 0.6403,
"step": 1198
},
{
"epoch": 1.0601237842617153,
"grad_norm": 0.24802426993846893,
"learning_rate": 1.6902943874501173e-05,
"loss": 0.5961,
"step": 1199
},
{
"epoch": 1.0610079575596818,
"grad_norm": 0.258173406124115,
"learning_rate": 1.689786617751416e-05,
"loss": 0.6031,
"step": 1200
},
{
"epoch": 1.061892130857648,
"grad_norm": 0.2684227228164673,
"learning_rate": 1.689278508547606e-05,
"loss": 0.6274,
"step": 1201
},
{
"epoch": 1.0627763041556144,
"grad_norm": 0.25794798135757446,
"learning_rate": 1.6887700600887732e-05,
"loss": 0.6042,
"step": 1202
},
{
"epoch": 1.063660477453581,
"grad_norm": 0.26139870285987854,
"learning_rate": 1.6882612726251696e-05,
"loss": 0.6197,
"step": 1203
},
{
"epoch": 1.0645446507515473,
"grad_norm": 0.2552439272403717,
"learning_rate": 1.687752146407215e-05,
"loss": 0.634,
"step": 1204
},
{
"epoch": 1.0654288240495138,
"grad_norm": 0.2660035490989685,
"learning_rate": 1.687242681685495e-05,
"loss": 0.6247,
"step": 1205
},
{
"epoch": 1.06631299734748,
"grad_norm": 0.2548738718032837,
"learning_rate": 1.686732878710763e-05,
"loss": 0.6219,
"step": 1206
},
{
"epoch": 1.0671971706454464,
"grad_norm": 0.25329524278640747,
"learning_rate": 1.686222737733938e-05,
"loss": 0.6228,
"step": 1207
},
{
"epoch": 1.068081343943413,
"grad_norm": 0.27623432874679565,
"learning_rate": 1.6857122590061052e-05,
"loss": 0.632,
"step": 1208
},
{
"epoch": 1.0689655172413792,
"grad_norm": 0.26089951395988464,
"learning_rate": 1.685201442778517e-05,
"loss": 0.6188,
"step": 1209
},
{
"epoch": 1.0698496905393458,
"grad_norm": 0.2587830722332001,
"learning_rate": 1.6846902893025906e-05,
"loss": 0.6127,
"step": 1210
},
{
"epoch": 1.070733863837312,
"grad_norm": 0.2646625339984894,
"learning_rate": 1.6841787988299106e-05,
"loss": 0.6184,
"step": 1211
},
{
"epoch": 1.0716180371352786,
"grad_norm": 0.26953622698783875,
"learning_rate": 1.6836669716122263e-05,
"loss": 0.6147,
"step": 1212
},
{
"epoch": 1.072502210433245,
"grad_norm": 0.2598692774772644,
"learning_rate": 1.6831548079014534e-05,
"loss": 0.5983,
"step": 1213
},
{
"epoch": 1.0733863837312114,
"grad_norm": 0.2633415758609772,
"learning_rate": 1.682642307949673e-05,
"loss": 0.5962,
"step": 1214
},
{
"epoch": 1.0742705570291777,
"grad_norm": 0.26447930932044983,
"learning_rate": 1.6821294720091323e-05,
"loss": 0.6383,
"step": 1215
},
{
"epoch": 1.075154730327144,
"grad_norm": 0.26669976115226746,
"learning_rate": 1.681616300332242e-05,
"loss": 0.6332,
"step": 1216
},
{
"epoch": 1.0760389036251106,
"grad_norm": 0.25877219438552856,
"learning_rate": 1.6811027931715807e-05,
"loss": 0.6188,
"step": 1217
},
{
"epoch": 1.0769230769230769,
"grad_norm": 0.26924005150794983,
"learning_rate": 1.68058895077989e-05,
"loss": 0.613,
"step": 1218
},
{
"epoch": 1.0778072502210434,
"grad_norm": 0.2654573917388916,
"learning_rate": 1.6800747734100774e-05,
"loss": 0.6223,
"step": 1219
},
{
"epoch": 1.0786914235190097,
"grad_norm": 0.2641811966896057,
"learning_rate": 1.6795602613152152e-05,
"loss": 0.6339,
"step": 1220
},
{
"epoch": 1.079575596816976,
"grad_norm": 0.26124563813209534,
"learning_rate": 1.6790454147485406e-05,
"loss": 0.6281,
"step": 1221
},
{
"epoch": 1.0804597701149425,
"grad_norm": 0.2645987570285797,
"learning_rate": 1.678530233963455e-05,
"loss": 0.6303,
"step": 1222
},
{
"epoch": 1.0813439434129088,
"grad_norm": 0.28587421774864197,
"learning_rate": 1.6780147192135242e-05,
"loss": 0.6292,
"step": 1223
},
{
"epoch": 1.0822281167108754,
"grad_norm": 0.27029022574424744,
"learning_rate": 1.677498870752479e-05,
"loss": 0.6206,
"step": 1224
},
{
"epoch": 1.0831122900088417,
"grad_norm": 0.3055930733680725,
"learning_rate": 1.6769826888342145e-05,
"loss": 0.6329,
"step": 1225
},
{
"epoch": 1.0839964633068082,
"grad_norm": 0.28340834379196167,
"learning_rate": 1.6764661737127887e-05,
"loss": 0.6461,
"step": 1226
},
{
"epoch": 1.0848806366047745,
"grad_norm": 0.3028965890407562,
"learning_rate": 1.6759493256424255e-05,
"loss": 0.621,
"step": 1227
},
{
"epoch": 1.085764809902741,
"grad_norm": 0.2685222327709198,
"learning_rate": 1.6754321448775105e-05,
"loss": 0.6174,
"step": 1228
},
{
"epoch": 1.0866489832007074,
"grad_norm": 0.31418630480766296,
"learning_rate": 1.674914631672595e-05,
"loss": 0.6381,
"step": 1229
},
{
"epoch": 1.0875331564986737,
"grad_norm": 0.26525503396987915,
"learning_rate": 1.674396786282393e-05,
"loss": 0.6159,
"step": 1230
},
{
"epoch": 1.0884173297966402,
"grad_norm": 0.3144836127758026,
"learning_rate": 1.6738786089617815e-05,
"loss": 0.641,
"step": 1231
},
{
"epoch": 1.0893015030946065,
"grad_norm": 0.2810137867927551,
"learning_rate": 1.673360099965802e-05,
"loss": 0.6448,
"step": 1232
},
{
"epoch": 1.090185676392573,
"grad_norm": 0.2875977158546448,
"learning_rate": 1.6728412595496586e-05,
"loss": 0.6481,
"step": 1233
},
{
"epoch": 1.0910698496905393,
"grad_norm": 0.2756240665912628,
"learning_rate": 1.672322087968719e-05,
"loss": 0.6583,
"step": 1234
},
{
"epoch": 1.0919540229885056,
"grad_norm": 0.27450910210609436,
"learning_rate": 1.6718025854785135e-05,
"loss": 0.6087,
"step": 1235
},
{
"epoch": 1.0928381962864722,
"grad_norm": 0.26873740553855896,
"learning_rate": 1.671282752334734e-05,
"loss": 0.6177,
"step": 1236
},
{
"epoch": 1.0937223695844385,
"grad_norm": 0.2861490547657013,
"learning_rate": 1.670762588793238e-05,
"loss": 0.612,
"step": 1237
},
{
"epoch": 1.094606542882405,
"grad_norm": 0.2778702676296234,
"learning_rate": 1.6702420951100434e-05,
"loss": 0.6029,
"step": 1238
},
{
"epoch": 1.0954907161803713,
"grad_norm": 0.2858835458755493,
"learning_rate": 1.6697212715413314e-05,
"loss": 0.6321,
"step": 1239
},
{
"epoch": 1.0963748894783378,
"grad_norm": 0.2903822958469391,
"learning_rate": 1.6692001183434453e-05,
"loss": 0.6473,
"step": 1240
},
{
"epoch": 1.0972590627763041,
"grad_norm": 0.2888301610946655,
"learning_rate": 1.668678635772891e-05,
"loss": 0.6469,
"step": 1241
},
{
"epoch": 1.0981432360742707,
"grad_norm": 0.3000604808330536,
"learning_rate": 1.6681568240863362e-05,
"loss": 0.6616,
"step": 1242
},
{
"epoch": 1.099027409372237,
"grad_norm": 0.2698865532875061,
"learning_rate": 1.6676346835406103e-05,
"loss": 0.637,
"step": 1243
},
{
"epoch": 1.0999115826702033,
"grad_norm": 0.26741912961006165,
"learning_rate": 1.6671122143927053e-05,
"loss": 0.6334,
"step": 1244
},
{
"epoch": 1.1007957559681698,
"grad_norm": 0.2745420038700104,
"learning_rate": 1.6665894168997746e-05,
"loss": 0.6463,
"step": 1245
},
{
"epoch": 1.1016799292661361,
"grad_norm": 0.26680004596710205,
"learning_rate": 1.6660662913191327e-05,
"loss": 0.6137,
"step": 1246
},
{
"epoch": 1.1025641025641026,
"grad_norm": 0.27459362149238586,
"learning_rate": 1.6655428379082565e-05,
"loss": 0.6223,
"step": 1247
},
{
"epoch": 1.103448275862069,
"grad_norm": 0.2799372673034668,
"learning_rate": 1.6650190569247836e-05,
"loss": 0.6343,
"step": 1248
},
{
"epoch": 1.1043324491600353,
"grad_norm": 0.3050251305103302,
"learning_rate": 1.6644949486265133e-05,
"loss": 0.6667,
"step": 1249
},
{
"epoch": 1.1052166224580018,
"grad_norm": 0.27570196986198425,
"learning_rate": 1.6639705132714054e-05,
"loss": 0.6335,
"step": 1250
},
{
"epoch": 1.106100795755968,
"grad_norm": 0.2715230882167816,
"learning_rate": 1.6634457511175816e-05,
"loss": 0.6347,
"step": 1251
},
{
"epoch": 1.1069849690539346,
"grad_norm": 0.25981900095939636,
"learning_rate": 1.6629206624233226e-05,
"loss": 0.6126,
"step": 1252
},
{
"epoch": 1.107869142351901,
"grad_norm": 0.2785845696926117,
"learning_rate": 1.6623952474470724e-05,
"loss": 0.6472,
"step": 1253
},
{
"epoch": 1.1087533156498675,
"grad_norm": 0.2734946012496948,
"learning_rate": 1.6618695064474336e-05,
"loss": 0.6109,
"step": 1254
},
{
"epoch": 1.1096374889478338,
"grad_norm": 0.2717086374759674,
"learning_rate": 1.66134343968317e-05,
"loss": 0.6471,
"step": 1255
},
{
"epoch": 1.1105216622458003,
"grad_norm": 0.2562820315361023,
"learning_rate": 1.6608170474132055e-05,
"loss": 0.62,
"step": 1256
},
{
"epoch": 1.1114058355437666,
"grad_norm": 0.285817950963974,
"learning_rate": 1.6602903298966247e-05,
"loss": 0.6508,
"step": 1257
},
{
"epoch": 1.112290008841733,
"grad_norm": 0.25098422169685364,
"learning_rate": 1.659763287392672e-05,
"loss": 0.616,
"step": 1258
},
{
"epoch": 1.1131741821396994,
"grad_norm": 0.268384724855423,
"learning_rate": 1.6592359201607514e-05,
"loss": 0.6234,
"step": 1259
},
{
"epoch": 1.1140583554376657,
"grad_norm": 0.2695018947124481,
"learning_rate": 1.658708228460427e-05,
"loss": 0.6158,
"step": 1260
},
{
"epoch": 1.1149425287356323,
"grad_norm": 0.26061496138572693,
"learning_rate": 1.6581802125514233e-05,
"loss": 0.6623,
"step": 1261
},
{
"epoch": 1.1158267020335986,
"grad_norm": 0.27080467343330383,
"learning_rate": 1.6576518726936227e-05,
"loss": 0.6086,
"step": 1262
},
{
"epoch": 1.1167108753315649,
"grad_norm": 0.2601155638694763,
"learning_rate": 1.6571232091470687e-05,
"loss": 0.617,
"step": 1263
},
{
"epoch": 1.1175950486295314,
"grad_norm": 0.2608267366886139,
"learning_rate": 1.6565942221719637e-05,
"loss": 0.5941,
"step": 1264
},
{
"epoch": 1.1184792219274977,
"grad_norm": 0.2553712725639343,
"learning_rate": 1.6560649120286684e-05,
"loss": 0.6135,
"step": 1265
},
{
"epoch": 1.1193633952254642,
"grad_norm": 0.2816087007522583,
"learning_rate": 1.6555352789777036e-05,
"loss": 0.6389,
"step": 1266
},
{
"epoch": 1.1202475685234305,
"grad_norm": 0.2648134231567383,
"learning_rate": 1.6550053232797486e-05,
"loss": 0.6154,
"step": 1267
},
{
"epoch": 1.121131741821397,
"grad_norm": 0.2744760811328888,
"learning_rate": 1.6544750451956416e-05,
"loss": 0.6387,
"step": 1268
},
{
"epoch": 1.1220159151193634,
"grad_norm": 0.2651299238204956,
"learning_rate": 1.653944444986379e-05,
"loss": 0.6405,
"step": 1269
},
{
"epoch": 1.12290008841733,
"grad_norm": 0.282466322183609,
"learning_rate": 1.653413522913117e-05,
"loss": 0.6157,
"step": 1270
},
{
"epoch": 1.1237842617152962,
"grad_norm": 0.27127668261528015,
"learning_rate": 1.6528822792371685e-05,
"loss": 0.6199,
"step": 1271
},
{
"epoch": 1.1246684350132625,
"grad_norm": 0.2709607481956482,
"learning_rate": 1.6523507142200065e-05,
"loss": 0.6249,
"step": 1272
},
{
"epoch": 1.125552608311229,
"grad_norm": 0.2871193289756775,
"learning_rate": 1.6518188281232602e-05,
"loss": 0.6499,
"step": 1273
},
{
"epoch": 1.1264367816091954,
"grad_norm": 0.2739630341529846,
"learning_rate": 1.651286621208719e-05,
"loss": 0.6295,
"step": 1274
},
{
"epoch": 1.1273209549071619,
"grad_norm": 0.27056512236595154,
"learning_rate": 1.650754093738328e-05,
"loss": 0.6312,
"step": 1275
},
{
"epoch": 1.1282051282051282,
"grad_norm": 0.2595468759536743,
"learning_rate": 1.650221245974192e-05,
"loss": 0.6378,
"step": 1276
},
{
"epoch": 1.1290893015030945,
"grad_norm": 0.28136393427848816,
"learning_rate": 1.649688078178572e-05,
"loss": 0.6321,
"step": 1277
},
{
"epoch": 1.129973474801061,
"grad_norm": 0.26631104946136475,
"learning_rate": 1.6491545906138872e-05,
"loss": 0.636,
"step": 1278
},
{
"epoch": 1.1308576480990273,
"grad_norm": 0.2998327910900116,
"learning_rate": 1.6486207835427146e-05,
"loss": 0.6475,
"step": 1279
},
{
"epoch": 1.1317418213969939,
"grad_norm": 0.28328076004981995,
"learning_rate": 1.6480866572277875e-05,
"loss": 0.6487,
"step": 1280
},
{
"epoch": 1.1326259946949602,
"grad_norm": 0.2737075686454773,
"learning_rate": 1.6475522119319968e-05,
"loss": 0.6148,
"step": 1281
},
{
"epoch": 1.1335101679929267,
"grad_norm": 0.27091649174690247,
"learning_rate": 1.64701744791839e-05,
"loss": 0.6089,
"step": 1282
},
{
"epoch": 1.134394341290893,
"grad_norm": 0.26850780844688416,
"learning_rate": 1.6464823654501726e-05,
"loss": 0.6561,
"step": 1283
},
{
"epoch": 1.1352785145888595,
"grad_norm": 0.27347424626350403,
"learning_rate": 1.6459469647907064e-05,
"loss": 0.6251,
"step": 1284
},
{
"epoch": 1.1361626878868258,
"grad_norm": 0.2758561670780182,
"learning_rate": 1.6454112462035082e-05,
"loss": 0.6341,
"step": 1285
},
{
"epoch": 1.1370468611847921,
"grad_norm": 0.26762357354164124,
"learning_rate": 1.644875209952254e-05,
"loss": 0.6257,
"step": 1286
},
{
"epoch": 1.1379310344827587,
"grad_norm": 0.265291690826416,
"learning_rate": 1.6443388563007733e-05,
"loss": 0.6244,
"step": 1287
},
{
"epoch": 1.138815207780725,
"grad_norm": 0.2615032196044922,
"learning_rate": 1.643802185513054e-05,
"loss": 0.6064,
"step": 1288
},
{
"epoch": 1.1396993810786915,
"grad_norm": 0.26807501912117004,
"learning_rate": 1.6432651978532395e-05,
"loss": 0.6229,
"step": 1289
},
{
"epoch": 1.1405835543766578,
"grad_norm": 0.2643338739871979,
"learning_rate": 1.642727893585629e-05,
"loss": 0.6207,
"step": 1290
},
{
"epoch": 1.1414677276746241,
"grad_norm": 0.2551080286502838,
"learning_rate": 1.642190272974677e-05,
"loss": 0.6157,
"step": 1291
},
{
"epoch": 1.1423519009725907,
"grad_norm": 0.26403579115867615,
"learning_rate": 1.641652336284995e-05,
"loss": 0.6082,
"step": 1292
},
{
"epoch": 1.143236074270557,
"grad_norm": 0.256933331489563,
"learning_rate": 1.6411140837813487e-05,
"loss": 0.5929,
"step": 1293
},
{
"epoch": 1.1441202475685235,
"grad_norm": 0.26767805218696594,
"learning_rate": 1.6405755157286597e-05,
"loss": 0.6406,
"step": 1294
},
{
"epoch": 1.1450044208664898,
"grad_norm": 0.2607956528663635,
"learning_rate": 1.6400366323920056e-05,
"loss": 0.6251,
"step": 1295
},
{
"epoch": 1.1458885941644563,
"grad_norm": 0.2549401819705963,
"learning_rate": 1.6394974340366184e-05,
"loss": 0.6122,
"step": 1296
},
{
"epoch": 1.1467727674624226,
"grad_norm": 0.27868473529815674,
"learning_rate": 1.6389579209278855e-05,
"loss": 0.6525,
"step": 1297
},
{
"epoch": 1.1476569407603892,
"grad_norm": 0.2715480625629425,
"learning_rate": 1.6384180933313492e-05,
"loss": 0.6438,
"step": 1298
},
{
"epoch": 1.1485411140583555,
"grad_norm": 0.2569078207015991,
"learning_rate": 1.6378779515127057e-05,
"loss": 0.6224,
"step": 1299
},
{
"epoch": 1.1494252873563218,
"grad_norm": 0.2974988520145416,
"learning_rate": 1.6373374957378078e-05,
"loss": 0.6338,
"step": 1300
},
{
"epoch": 1.1503094606542883,
"grad_norm": 0.2608301341533661,
"learning_rate": 1.6367967262726608e-05,
"loss": 0.6376,
"step": 1301
},
{
"epoch": 1.1511936339522546,
"grad_norm": 0.26160040497779846,
"learning_rate": 1.6362556433834254e-05,
"loss": 0.6215,
"step": 1302
},
{
"epoch": 1.1520778072502211,
"grad_norm": 0.2818124294281006,
"learning_rate": 1.635714247336417e-05,
"loss": 0.6289,
"step": 1303
},
{
"epoch": 1.1529619805481874,
"grad_norm": 0.2657458186149597,
"learning_rate": 1.635172538398104e-05,
"loss": 0.6133,
"step": 1304
},
{
"epoch": 1.1538461538461537,
"grad_norm": 0.26485705375671387,
"learning_rate": 1.6346305168351095e-05,
"loss": 0.6422,
"step": 1305
},
{
"epoch": 1.1547303271441203,
"grad_norm": 0.277269184589386,
"learning_rate": 1.6340881829142103e-05,
"loss": 0.6282,
"step": 1306
},
{
"epoch": 1.1556145004420866,
"grad_norm": 0.2727701663970947,
"learning_rate": 1.6335455369023375e-05,
"loss": 0.6291,
"step": 1307
},
{
"epoch": 1.156498673740053,
"grad_norm": 0.2719358205795288,
"learning_rate": 1.6330025790665743e-05,
"loss": 0.6214,
"step": 1308
},
{
"epoch": 1.1573828470380194,
"grad_norm": 0.2592701017856598,
"learning_rate": 1.6324593096741588e-05,
"loss": 0.6298,
"step": 1309
},
{
"epoch": 1.158267020335986,
"grad_norm": 0.27116450667381287,
"learning_rate": 1.6319157289924826e-05,
"loss": 0.6366,
"step": 1310
},
{
"epoch": 1.1591511936339522,
"grad_norm": 0.2657269835472107,
"learning_rate": 1.631371837289089e-05,
"loss": 0.6191,
"step": 1311
},
{
"epoch": 1.1600353669319188,
"grad_norm": 0.25910571217536926,
"learning_rate": 1.6308276348316755e-05,
"loss": 0.6162,
"step": 1312
},
{
"epoch": 1.160919540229885,
"grad_norm": 0.2947475016117096,
"learning_rate": 1.630283121888093e-05,
"loss": 0.6062,
"step": 1313
},
{
"epoch": 1.1618037135278514,
"grad_norm": 0.2944618761539459,
"learning_rate": 1.6297382987263438e-05,
"loss": 0.6111,
"step": 1314
},
{
"epoch": 1.162687886825818,
"grad_norm": 0.27138277888298035,
"learning_rate": 1.629193165614584e-05,
"loss": 0.6394,
"step": 1315
},
{
"epoch": 1.1635720601237842,
"grad_norm": 0.27073073387145996,
"learning_rate": 1.6286477228211216e-05,
"loss": 0.6258,
"step": 1316
},
{
"epoch": 1.1644562334217508,
"grad_norm": 0.2757546901702881,
"learning_rate": 1.628101970614418e-05,
"loss": 0.6267,
"step": 1317
},
{
"epoch": 1.165340406719717,
"grad_norm": 0.25328877568244934,
"learning_rate": 1.6275559092630854e-05,
"loss": 0.6083,
"step": 1318
},
{
"epoch": 1.1662245800176834,
"grad_norm": 0.32297784090042114,
"learning_rate": 1.6270095390358893e-05,
"loss": 0.6331,
"step": 1319
},
{
"epoch": 1.16710875331565,
"grad_norm": 0.26827719807624817,
"learning_rate": 1.6264628602017473e-05,
"loss": 0.6282,
"step": 1320
},
{
"epoch": 1.1679929266136162,
"grad_norm": 0.3571995794773102,
"learning_rate": 1.625915873029728e-05,
"loss": 0.6194,
"step": 1321
},
{
"epoch": 1.1688770999115827,
"grad_norm": 0.2716355621814728,
"learning_rate": 1.625368577789052e-05,
"loss": 0.6495,
"step": 1322
},
{
"epoch": 1.169761273209549,
"grad_norm": 0.2643284499645233,
"learning_rate": 1.624820974749093e-05,
"loss": 0.618,
"step": 1323
},
{
"epoch": 1.1706454465075156,
"grad_norm": 0.2656436860561371,
"learning_rate": 1.6242730641793735e-05,
"loss": 0.6481,
"step": 1324
},
{
"epoch": 1.1715296198054819,
"grad_norm": 0.2686118185520172,
"learning_rate": 1.6237248463495692e-05,
"loss": 0.6459,
"step": 1325
},
{
"epoch": 1.1724137931034484,
"grad_norm": 0.27660882472991943,
"learning_rate": 1.6231763215295075e-05,
"loss": 0.6209,
"step": 1326
},
{
"epoch": 1.1732979664014147,
"grad_norm": 0.27600327134132385,
"learning_rate": 1.6226274899891655e-05,
"loss": 0.6235,
"step": 1327
},
{
"epoch": 1.174182139699381,
"grad_norm": 0.27709048986434937,
"learning_rate": 1.6220783519986715e-05,
"loss": 0.6204,
"step": 1328
},
{
"epoch": 1.1750663129973475,
"grad_norm": 0.254951149225235,
"learning_rate": 1.6215289078283053e-05,
"loss": 0.62,
"step": 1329
},
{
"epoch": 1.1759504862953138,
"grad_norm": 0.25819993019104004,
"learning_rate": 1.620979157748497e-05,
"loss": 0.6371,
"step": 1330
},
{
"epoch": 1.1768346595932804,
"grad_norm": 0.2618928551673889,
"learning_rate": 1.6204291020298276e-05,
"loss": 0.6292,
"step": 1331
},
{
"epoch": 1.1777188328912467,
"grad_norm": 0.2674223780632019,
"learning_rate": 1.6198787409430278e-05,
"loss": 0.6335,
"step": 1332
},
{
"epoch": 1.178603006189213,
"grad_norm": 0.27773454785346985,
"learning_rate": 1.6193280747589793e-05,
"loss": 0.62,
"step": 1333
},
{
"epoch": 1.1794871794871795,
"grad_norm": 0.26043564081192017,
"learning_rate": 1.618777103748714e-05,
"loss": 0.61,
"step": 1334
},
{
"epoch": 1.1803713527851458,
"grad_norm": 0.26907721161842346,
"learning_rate": 1.6182258281834127e-05,
"loss": 0.6531,
"step": 1335
},
{
"epoch": 1.1812555260831124,
"grad_norm": 0.2774446904659271,
"learning_rate": 1.617674248334408e-05,
"loss": 0.635,
"step": 1336
},
{
"epoch": 1.1821396993810787,
"grad_norm": 0.2726019322872162,
"learning_rate": 1.61712236447318e-05,
"loss": 0.6351,
"step": 1337
},
{
"epoch": 1.1830238726790452,
"grad_norm": 0.27083060145378113,
"learning_rate": 1.6165701768713607e-05,
"loss": 0.6283,
"step": 1338
},
{
"epoch": 1.1839080459770115,
"grad_norm": 0.2634178698062897,
"learning_rate": 1.6160176858007305e-05,
"loss": 0.6275,
"step": 1339
},
{
"epoch": 1.184792219274978,
"grad_norm": 0.2592163681983948,
"learning_rate": 1.6154648915332183e-05,
"loss": 0.6351,
"step": 1340
},
{
"epoch": 1.1856763925729443,
"grad_norm": 0.26565590500831604,
"learning_rate": 1.614911794340904e-05,
"loss": 0.6395,
"step": 1341
},
{
"epoch": 1.1865605658709106,
"grad_norm": 0.2672779858112335,
"learning_rate": 1.6143583944960153e-05,
"loss": 0.6119,
"step": 1342
},
{
"epoch": 1.1874447391688772,
"grad_norm": 0.271102637052536,
"learning_rate": 1.61380469227093e-05,
"loss": 0.624,
"step": 1343
},
{
"epoch": 1.1883289124668435,
"grad_norm": 0.26447901129722595,
"learning_rate": 1.613250687938173e-05,
"loss": 0.6269,
"step": 1344
},
{
"epoch": 1.18921308576481,
"grad_norm": 0.27545759081840515,
"learning_rate": 1.612696381770419e-05,
"loss": 0.6213,
"step": 1345
},
{
"epoch": 1.1900972590627763,
"grad_norm": 0.2762075662612915,
"learning_rate": 1.6121417740404924e-05,
"loss": 0.6703,
"step": 1346
},
{
"epoch": 1.1909814323607426,
"grad_norm": 0.26442280411720276,
"learning_rate": 1.6115868650213632e-05,
"loss": 0.6286,
"step": 1347
},
{
"epoch": 1.1918656056587091,
"grad_norm": 0.2820385992527008,
"learning_rate": 1.6110316549861525e-05,
"loss": 0.6439,
"step": 1348
},
{
"epoch": 1.1927497789566754,
"grad_norm": 0.2627619504928589,
"learning_rate": 1.610476144208128e-05,
"loss": 0.6196,
"step": 1349
},
{
"epoch": 1.193633952254642,
"grad_norm": 0.2752849757671356,
"learning_rate": 1.609920332960705e-05,
"loss": 0.6181,
"step": 1350
},
{
"epoch": 1.1945181255526083,
"grad_norm": 0.27394169569015503,
"learning_rate": 1.6093642215174486e-05,
"loss": 0.6249,
"step": 1351
},
{
"epoch": 1.1954022988505748,
"grad_norm": 0.2750004827976227,
"learning_rate": 1.60880781015207e-05,
"loss": 0.6495,
"step": 1352
},
{
"epoch": 1.1962864721485411,
"grad_norm": 0.2945844829082489,
"learning_rate": 1.6082510991384286e-05,
"loss": 0.6288,
"step": 1353
},
{
"epoch": 1.1971706454465074,
"grad_norm": 0.27252763509750366,
"learning_rate": 1.6076940887505314e-05,
"loss": 0.618,
"step": 1354
},
{
"epoch": 1.198054818744474,
"grad_norm": 0.2628398537635803,
"learning_rate": 1.607136779262532e-05,
"loss": 0.6365,
"step": 1355
},
{
"epoch": 1.1989389920424403,
"grad_norm": 0.2608741521835327,
"learning_rate": 1.6065791709487322e-05,
"loss": 0.6343,
"step": 1356
},
{
"epoch": 1.1998231653404068,
"grad_norm": 0.2840995788574219,
"learning_rate": 1.6060212640835806e-05,
"loss": 0.6302,
"step": 1357
},
{
"epoch": 1.200707338638373,
"grad_norm": 0.2941756844520569,
"learning_rate": 1.605463058941672e-05,
"loss": 0.6396,
"step": 1358
},
{
"epoch": 1.2015915119363396,
"grad_norm": 0.26439139246940613,
"learning_rate": 1.604904555797749e-05,
"loss": 0.6089,
"step": 1359
},
{
"epoch": 1.202475685234306,
"grad_norm": 0.2642272710800171,
"learning_rate": 1.6043457549267008e-05,
"loss": 0.6122,
"step": 1360
},
{
"epoch": 1.2033598585322722,
"grad_norm": 0.2641412615776062,
"learning_rate": 1.6037866566035623e-05,
"loss": 0.62,
"step": 1361
},
{
"epoch": 1.2042440318302388,
"grad_norm": 0.27999386191368103,
"learning_rate": 1.6032272611035154e-05,
"loss": 0.6309,
"step": 1362
},
{
"epoch": 1.205128205128205,
"grad_norm": 0.2647188603878021,
"learning_rate": 1.6026675687018888e-05,
"loss": 0.6554,
"step": 1363
},
{
"epoch": 1.2060123784261716,
"grad_norm": 0.28167954087257385,
"learning_rate": 1.6021075796741555e-05,
"loss": 0.6247,
"step": 1364
},
{
"epoch": 1.206896551724138,
"grad_norm": 0.2792663276195526,
"learning_rate": 1.6015472942959367e-05,
"loss": 0.6424,
"step": 1365
},
{
"epoch": 1.2077807250221044,
"grad_norm": 0.27852723002433777,
"learning_rate": 1.6009867128429982e-05,
"loss": 0.621,
"step": 1366
},
{
"epoch": 1.2086648983200707,
"grad_norm": 0.2794431447982788,
"learning_rate": 1.6004258355912518e-05,
"loss": 0.6404,
"step": 1367
},
{
"epoch": 1.209549071618037,
"grad_norm": 0.2861528992652893,
"learning_rate": 1.5998646628167547e-05,
"loss": 0.6541,
"step": 1368
},
{
"epoch": 1.2104332449160036,
"grad_norm": 0.27545905113220215,
"learning_rate": 1.59930319479571e-05,
"loss": 0.6393,
"step": 1369
},
{
"epoch": 1.2113174182139699,
"grad_norm": 0.27669757604599,
"learning_rate": 1.598741431804466e-05,
"loss": 0.6382,
"step": 1370
},
{
"epoch": 1.2122015915119364,
"grad_norm": 0.2623705565929413,
"learning_rate": 1.5981793741195158e-05,
"loss": 0.6209,
"step": 1371
},
{
"epoch": 1.2130857648099027,
"grad_norm": 0.26495417952537537,
"learning_rate": 1.597617022017498e-05,
"loss": 0.6472,
"step": 1372
},
{
"epoch": 1.2139699381078692,
"grad_norm": 0.25165706872940063,
"learning_rate": 1.597054375775195e-05,
"loss": 0.5925,
"step": 1373
},
{
"epoch": 1.2148541114058355,
"grad_norm": 0.2889125347137451,
"learning_rate": 1.596491435669536e-05,
"loss": 0.6613,
"step": 1374
},
{
"epoch": 1.2157382847038019,
"grad_norm": 0.2734815776348114,
"learning_rate": 1.595928201977593e-05,
"loss": 0.631,
"step": 1375
},
{
"epoch": 1.2166224580017684,
"grad_norm": 0.2703576385974884,
"learning_rate": 1.5953646749765838e-05,
"loss": 0.6232,
"step": 1376
},
{
"epoch": 1.2175066312997347,
"grad_norm": 0.2767334282398224,
"learning_rate": 1.5948008549438694e-05,
"loss": 0.651,
"step": 1377
},
{
"epoch": 1.2183908045977012,
"grad_norm": 0.25227072834968567,
"learning_rate": 1.594236742156956e-05,
"loss": 0.6062,
"step": 1378
},
{
"epoch": 1.2192749778956675,
"grad_norm": 0.261905699968338,
"learning_rate": 1.5936723368934933e-05,
"loss": 0.6322,
"step": 1379
},
{
"epoch": 1.2201591511936338,
"grad_norm": 0.2813282907009125,
"learning_rate": 1.593107639431275e-05,
"loss": 0.6326,
"step": 1380
},
{
"epoch": 1.2210433244916004,
"grad_norm": 0.2548713982105255,
"learning_rate": 1.592542650048239e-05,
"loss": 0.6234,
"step": 1381
},
{
"epoch": 1.2219274977895667,
"grad_norm": 0.2622441351413727,
"learning_rate": 1.5919773690224664e-05,
"loss": 0.6261,
"step": 1382
},
{
"epoch": 1.2228116710875332,
"grad_norm": 0.2574688792228699,
"learning_rate": 1.5914117966321824e-05,
"loss": 0.6126,
"step": 1383
},
{
"epoch": 1.2236958443854995,
"grad_norm": 0.25839763879776,
"learning_rate": 1.590845933155755e-05,
"loss": 0.6175,
"step": 1384
},
{
"epoch": 1.224580017683466,
"grad_norm": 0.2734919488430023,
"learning_rate": 1.590279778871696e-05,
"loss": 0.6614,
"step": 1385
},
{
"epoch": 1.2254641909814323,
"grad_norm": 0.2610730826854706,
"learning_rate": 1.5897133340586596e-05,
"loss": 0.6161,
"step": 1386
},
{
"epoch": 1.2263483642793989,
"grad_norm": 0.25793394446372986,
"learning_rate": 1.589146598995444e-05,
"loss": 0.6158,
"step": 1387
},
{
"epoch": 1.2272325375773652,
"grad_norm": 0.2684408128261566,
"learning_rate": 1.5885795739609894e-05,
"loss": 0.623,
"step": 1388
},
{
"epoch": 1.2281167108753315,
"grad_norm": 0.26952189207077026,
"learning_rate": 1.588012259234379e-05,
"loss": 0.6367,
"step": 1389
},
{
"epoch": 1.229000884173298,
"grad_norm": 0.25151994824409485,
"learning_rate": 1.5874446550948394e-05,
"loss": 0.6265,
"step": 1390
},
{
"epoch": 1.2298850574712643,
"grad_norm": 0.2850368320941925,
"learning_rate": 1.5868767618217377e-05,
"loss": 0.6181,
"step": 1391
},
{
"epoch": 1.2307692307692308,
"grad_norm": 0.26118093729019165,
"learning_rate": 1.5863085796945856e-05,
"loss": 0.6227,
"step": 1392
},
{
"epoch": 1.2316534040671971,
"grad_norm": 0.2850300371646881,
"learning_rate": 1.585740108993035e-05,
"loss": 0.652,
"step": 1393
},
{
"epoch": 1.2325375773651635,
"grad_norm": 0.2551823854446411,
"learning_rate": 1.5851713499968812e-05,
"loss": 0.6057,
"step": 1394
},
{
"epoch": 1.23342175066313,
"grad_norm": 0.2837170362472534,
"learning_rate": 1.5846023029860612e-05,
"loss": 0.6413,
"step": 1395
},
{
"epoch": 1.2343059239610963,
"grad_norm": 0.27628153562545776,
"learning_rate": 1.5840329682406525e-05,
"loss": 0.6243,
"step": 1396
},
{
"epoch": 1.2351900972590628,
"grad_norm": 0.274521142244339,
"learning_rate": 1.5834633460408756e-05,
"loss": 0.6408,
"step": 1397
},
{
"epoch": 1.2360742705570291,
"grad_norm": 0.2636716067790985,
"learning_rate": 1.5828934366670923e-05,
"loss": 0.6379,
"step": 1398
},
{
"epoch": 1.2369584438549956,
"grad_norm": 0.2601407766342163,
"learning_rate": 1.582323240399806e-05,
"loss": 0.6161,
"step": 1399
},
{
"epoch": 1.237842617152962,
"grad_norm": 0.2537432312965393,
"learning_rate": 1.581752757519659e-05,
"loss": 0.6237,
"step": 1400
},
{
"epoch": 1.2387267904509285,
"grad_norm": 0.2714279592037201,
"learning_rate": 1.581181988307438e-05,
"loss": 0.619,
"step": 1401
},
{
"epoch": 1.2396109637488948,
"grad_norm": 0.28167831897735596,
"learning_rate": 1.580610933044069e-05,
"loss": 0.6578,
"step": 1402
},
{
"epoch": 1.240495137046861,
"grad_norm": 0.2750946283340454,
"learning_rate": 1.5800395920106183e-05,
"loss": 0.648,
"step": 1403
},
{
"epoch": 1.2413793103448276,
"grad_norm": 0.26390576362609863,
"learning_rate": 1.579467965488294e-05,
"loss": 0.6047,
"step": 1404
},
{
"epoch": 1.242263483642794,
"grad_norm": 0.2646927237510681,
"learning_rate": 1.578896053758444e-05,
"loss": 0.6265,
"step": 1405
},
{
"epoch": 1.2431476569407605,
"grad_norm": 0.27424487471580505,
"learning_rate": 1.5783238571025563e-05,
"loss": 0.6356,
"step": 1406
},
{
"epoch": 1.2440318302387268,
"grad_norm": 0.28149381279945374,
"learning_rate": 1.5777513758022597e-05,
"loss": 0.6476,
"step": 1407
},
{
"epoch": 1.244916003536693,
"grad_norm": 0.26738864183425903,
"learning_rate": 1.5771786101393237e-05,
"loss": 0.6154,
"step": 1408
},
{
"epoch": 1.2458001768346596,
"grad_norm": 0.2882078289985657,
"learning_rate": 1.5766055603956563e-05,
"loss": 0.6337,
"step": 1409
},
{
"epoch": 1.246684350132626,
"grad_norm": 0.27995678782463074,
"learning_rate": 1.5760322268533062e-05,
"loss": 0.6229,
"step": 1410
},
{
"epoch": 1.2475685234305924,
"grad_norm": 0.28673920035362244,
"learning_rate": 1.5754586097944617e-05,
"loss": 0.6565,
"step": 1411
},
{
"epoch": 1.2484526967285587,
"grad_norm": 0.2755684554576874,
"learning_rate": 1.5748847095014507e-05,
"loss": 0.6139,
"step": 1412
},
{
"epoch": 1.2493368700265253,
"grad_norm": 0.29883909225463867,
"learning_rate": 1.57431052625674e-05,
"loss": 0.6209,
"step": 1413
},
{
"epoch": 1.2502210433244916,
"grad_norm": 0.2717154324054718,
"learning_rate": 1.5737360603429365e-05,
"loss": 0.6172,
"step": 1414
},
{
"epoch": 1.251105216622458,
"grad_norm": 0.30571115016937256,
"learning_rate": 1.573161312042785e-05,
"loss": 0.6196,
"step": 1415
},
{
"epoch": 1.2519893899204244,
"grad_norm": 0.2747178077697754,
"learning_rate": 1.572586281639171e-05,
"loss": 0.6381,
"step": 1416
},
{
"epoch": 1.2528735632183907,
"grad_norm": 0.2817556858062744,
"learning_rate": 1.5720109694151174e-05,
"loss": 0.6369,
"step": 1417
},
{
"epoch": 1.2537577365163572,
"grad_norm": 0.27289384603500366,
"learning_rate": 1.571435375653787e-05,
"loss": 0.6253,
"step": 1418
},
{
"epoch": 1.2546419098143236,
"grad_norm": 0.25015899538993835,
"learning_rate": 1.5708595006384792e-05,
"loss": 0.6246,
"step": 1419
},
{
"epoch": 1.25552608311229,
"grad_norm": 0.2563912272453308,
"learning_rate": 1.5702833446526342e-05,
"loss": 0.6332,
"step": 1420
},
{
"epoch": 1.2564102564102564,
"grad_norm": 0.257411926984787,
"learning_rate": 1.5697069079798294e-05,
"loss": 0.6229,
"step": 1421
},
{
"epoch": 1.2572944297082227,
"grad_norm": 0.25585630536079407,
"learning_rate": 1.569130190903779e-05,
"loss": 0.6089,
"step": 1422
},
{
"epoch": 1.2581786030061892,
"grad_norm": 0.2512992322444916,
"learning_rate": 1.5685531937083384e-05,
"loss": 0.6335,
"step": 1423
},
{
"epoch": 1.2590627763041558,
"grad_norm": 0.24229037761688232,
"learning_rate": 1.5679759166774983e-05,
"loss": 0.6141,
"step": 1424
},
{
"epoch": 1.259946949602122,
"grad_norm": 0.2551206946372986,
"learning_rate": 1.5673983600953873e-05,
"loss": 0.6189,
"step": 1425
},
{
"epoch": 1.2608311229000884,
"grad_norm": 0.2525327503681183,
"learning_rate": 1.5668205242462728e-05,
"loss": 0.5938,
"step": 1426
},
{
"epoch": 1.261715296198055,
"grad_norm": 0.25067248940467834,
"learning_rate": 1.566242409414559e-05,
"loss": 0.6292,
"step": 1427
},
{
"epoch": 1.2625994694960212,
"grad_norm": 0.26346850395202637,
"learning_rate": 1.5656640158847874e-05,
"loss": 0.6298,
"step": 1428
},
{
"epoch": 1.2634836427939877,
"grad_norm": 0.2548886239528656,
"learning_rate": 1.5650853439416364e-05,
"loss": 0.6234,
"step": 1429
},
{
"epoch": 1.264367816091954,
"grad_norm": 0.25361084938049316,
"learning_rate": 1.5645063938699218e-05,
"loss": 0.5976,
"step": 1430
},
{
"epoch": 1.2652519893899203,
"grad_norm": 0.26032042503356934,
"learning_rate": 1.5639271659545964e-05,
"loss": 0.6337,
"step": 1431
},
{
"epoch": 1.2661361626878869,
"grad_norm": 0.264461487531662,
"learning_rate": 1.5633476604807497e-05,
"loss": 0.6416,
"step": 1432
},
{
"epoch": 1.2670203359858532,
"grad_norm": 0.2573243975639343,
"learning_rate": 1.5627678777336077e-05,
"loss": 0.6296,
"step": 1433
},
{
"epoch": 1.2679045092838197,
"grad_norm": 0.2643515169620514,
"learning_rate": 1.5621878179985323e-05,
"loss": 0.6187,
"step": 1434
},
{
"epoch": 1.268788682581786,
"grad_norm": 0.25730323791503906,
"learning_rate": 1.561607481561023e-05,
"loss": 0.6236,
"step": 1435
},
{
"epoch": 1.2696728558797523,
"grad_norm": 0.25765368342399597,
"learning_rate": 1.5610268687067144e-05,
"loss": 0.6221,
"step": 1436
},
{
"epoch": 1.2705570291777188,
"grad_norm": 0.25590279698371887,
"learning_rate": 1.5604459797213777e-05,
"loss": 0.6024,
"step": 1437
},
{
"epoch": 1.2714412024756854,
"grad_norm": 0.2583705484867096,
"learning_rate": 1.559864814890919e-05,
"loss": 0.6149,
"step": 1438
},
{
"epoch": 1.2723253757736517,
"grad_norm": 0.25402167439460754,
"learning_rate": 1.5592833745013824e-05,
"loss": 0.6061,
"step": 1439
},
{
"epoch": 1.273209549071618,
"grad_norm": 0.2650664448738098,
"learning_rate": 1.558701658838945e-05,
"loss": 0.6238,
"step": 1440
},
{
"epoch": 1.2740937223695845,
"grad_norm": 0.2692378759384155,
"learning_rate": 1.5581196681899217e-05,
"loss": 0.6386,
"step": 1441
},
{
"epoch": 1.2749778956675508,
"grad_norm": 0.26236826181411743,
"learning_rate": 1.5575374028407607e-05,
"loss": 0.6264,
"step": 1442
},
{
"epoch": 1.2758620689655173,
"grad_norm": 0.25569644570350647,
"learning_rate": 1.556954863078047e-05,
"loss": 0.6033,
"step": 1443
},
{
"epoch": 1.2767462422634837,
"grad_norm": 0.29661980271339417,
"learning_rate": 1.5563720491885e-05,
"loss": 0.6113,
"step": 1444
},
{
"epoch": 1.27763041556145,
"grad_norm": 0.2592065632343292,
"learning_rate": 1.5557889614589738e-05,
"loss": 0.6152,
"step": 1445
},
{
"epoch": 1.2785145888594165,
"grad_norm": 0.27681753039360046,
"learning_rate": 1.5552056001764576e-05,
"loss": 0.6577,
"step": 1446
},
{
"epoch": 1.2793987621573828,
"grad_norm": 0.25626248121261597,
"learning_rate": 1.554621965628075e-05,
"loss": 0.6243,
"step": 1447
},
{
"epoch": 1.2802829354553493,
"grad_norm": 0.2691269516944885,
"learning_rate": 1.5540380581010842e-05,
"loss": 0.6207,
"step": 1448
},
{
"epoch": 1.2811671087533156,
"grad_norm": 0.25934237241744995,
"learning_rate": 1.5534538778828786e-05,
"loss": 0.624,
"step": 1449
},
{
"epoch": 1.282051282051282,
"grad_norm": 0.25388020277023315,
"learning_rate": 1.5528694252609843e-05,
"loss": 0.6158,
"step": 1450
},
{
"epoch": 1.2829354553492485,
"grad_norm": 0.25906768441200256,
"learning_rate": 1.5522847005230623e-05,
"loss": 0.5852,
"step": 1451
},
{
"epoch": 1.2838196286472148,
"grad_norm": 0.2654707431793213,
"learning_rate": 1.551699703956908e-05,
"loss": 0.6216,
"step": 1452
},
{
"epoch": 1.2847038019451813,
"grad_norm": 0.26841551065444946,
"learning_rate": 1.551114435850449e-05,
"loss": 0.5972,
"step": 1453
},
{
"epoch": 1.2855879752431476,
"grad_norm": 0.2597275376319885,
"learning_rate": 1.5505288964917486e-05,
"loss": 0.6297,
"step": 1454
},
{
"epoch": 1.2864721485411141,
"grad_norm": 0.2709927558898926,
"learning_rate": 1.549943086169002e-05,
"loss": 0.6327,
"step": 1455
},
{
"epoch": 1.2873563218390804,
"grad_norm": 0.27075862884521484,
"learning_rate": 1.549357005170539e-05,
"loss": 0.6345,
"step": 1456
},
{
"epoch": 1.288240495137047,
"grad_norm": 0.24558989703655243,
"learning_rate": 1.5487706537848212e-05,
"loss": 0.5976,
"step": 1457
},
{
"epoch": 1.2891246684350133,
"grad_norm": 0.27163225412368774,
"learning_rate": 1.548184032300444e-05,
"loss": 0.6216,
"step": 1458
},
{
"epoch": 1.2900088417329796,
"grad_norm": 0.2628497779369354,
"learning_rate": 1.547597141006137e-05,
"loss": 0.6167,
"step": 1459
},
{
"epoch": 1.2908930150309461,
"grad_norm": 0.2744913101196289,
"learning_rate": 1.5470099801907603e-05,
"loss": 0.6294,
"step": 1460
},
{
"epoch": 1.2917771883289124,
"grad_norm": 0.2589704692363739,
"learning_rate": 1.546422550143308e-05,
"loss": 0.6337,
"step": 1461
},
{
"epoch": 1.292661361626879,
"grad_norm": 0.2813946008682251,
"learning_rate": 1.5458348511529067e-05,
"loss": 0.617,
"step": 1462
},
{
"epoch": 1.2935455349248453,
"grad_norm": 0.2847347855567932,
"learning_rate": 1.5452468835088154e-05,
"loss": 0.619,
"step": 1463
},
{
"epoch": 1.2944297082228116,
"grad_norm": 0.2629869282245636,
"learning_rate": 1.5446586475004243e-05,
"loss": 0.6396,
"step": 1464
},
{
"epoch": 1.295313881520778,
"grad_norm": 0.271826833486557,
"learning_rate": 1.5440701434172564e-05,
"loss": 0.6133,
"step": 1465
},
{
"epoch": 1.2961980548187444,
"grad_norm": 0.27219828963279724,
"learning_rate": 1.5434813715489675e-05,
"loss": 0.6287,
"step": 1466
},
{
"epoch": 1.297082228116711,
"grad_norm": 0.26825278997421265,
"learning_rate": 1.542892332185344e-05,
"loss": 0.6126,
"step": 1467
},
{
"epoch": 1.2979664014146772,
"grad_norm": 0.2580903172492981,
"learning_rate": 1.5423030256163045e-05,
"loss": 0.5998,
"step": 1468
},
{
"epoch": 1.2988505747126438,
"grad_norm": 0.2648330330848694,
"learning_rate": 1.541713452131899e-05,
"loss": 0.5857,
"step": 1469
},
{
"epoch": 1.29973474801061,
"grad_norm": 0.28202253580093384,
"learning_rate": 1.5411236120223084e-05,
"loss": 0.5964,
"step": 1470
},
{
"epoch": 1.3006189213085766,
"grad_norm": 0.2608523964881897,
"learning_rate": 1.5405335055778457e-05,
"loss": 0.6266,
"step": 1471
},
{
"epoch": 1.301503094606543,
"grad_norm": 0.2718716859817505,
"learning_rate": 1.5399431330889543e-05,
"loss": 0.6121,
"step": 1472
},
{
"epoch": 1.3023872679045092,
"grad_norm": 0.26414912939071655,
"learning_rate": 1.539352494846209e-05,
"loss": 0.6285,
"step": 1473
},
{
"epoch": 1.3032714412024757,
"grad_norm": 0.26774054765701294,
"learning_rate": 1.5387615911403154e-05,
"loss": 0.612,
"step": 1474
},
{
"epoch": 1.304155614500442,
"grad_norm": 0.2646068334579468,
"learning_rate": 1.538170422262109e-05,
"loss": 0.6203,
"step": 1475
},
{
"epoch": 1.3050397877984086,
"grad_norm": 0.2726859748363495,
"learning_rate": 1.5375789885025573e-05,
"loss": 0.6249,
"step": 1476
},
{
"epoch": 1.3059239610963749,
"grad_norm": 0.27412188053131104,
"learning_rate": 1.536987290152756e-05,
"loss": 0.6395,
"step": 1477
},
{
"epoch": 1.3068081343943412,
"grad_norm": 0.2646084725856781,
"learning_rate": 1.5363953275039335e-05,
"loss": 0.6201,
"step": 1478
},
{
"epoch": 1.3076923076923077,
"grad_norm": 0.26556506752967834,
"learning_rate": 1.5358031008474462e-05,
"loss": 0.6068,
"step": 1479
},
{
"epoch": 1.308576480990274,
"grad_norm": 0.2664581537246704,
"learning_rate": 1.5352106104747816e-05,
"loss": 0.6139,
"step": 1480
},
{
"epoch": 1.3094606542882405,
"grad_norm": 0.2751251757144928,
"learning_rate": 1.5346178566775568e-05,
"loss": 0.6255,
"step": 1481
},
{
"epoch": 1.3103448275862069,
"grad_norm": 0.28228485584259033,
"learning_rate": 1.5340248397475185e-05,
"loss": 0.6406,
"step": 1482
},
{
"epoch": 1.3112290008841734,
"grad_norm": 0.2637360394001007,
"learning_rate": 1.5334315599765427e-05,
"loss": 0.6239,
"step": 1483
},
{
"epoch": 1.3121131741821397,
"grad_norm": 0.2587665617465973,
"learning_rate": 1.5328380176566348e-05,
"loss": 0.6117,
"step": 1484
},
{
"epoch": 1.3129973474801062,
"grad_norm": 0.271525502204895,
"learning_rate": 1.5322442130799298e-05,
"loss": 0.6219,
"step": 1485
},
{
"epoch": 1.3138815207780725,
"grad_norm": 0.26174232363700867,
"learning_rate": 1.531650146538692e-05,
"loss": 0.6187,
"step": 1486
},
{
"epoch": 1.3147656940760388,
"grad_norm": 0.27605360746383667,
"learning_rate": 1.5310558183253132e-05,
"loss": 0.6384,
"step": 1487
},
{
"epoch": 1.3156498673740054,
"grad_norm": 0.25944581627845764,
"learning_rate": 1.5304612287323156e-05,
"loss": 0.6301,
"step": 1488
},
{
"epoch": 1.3165340406719717,
"grad_norm": 0.27530503273010254,
"learning_rate": 1.5298663780523492e-05,
"loss": 0.6179,
"step": 1489
},
{
"epoch": 1.3174182139699382,
"grad_norm": 0.26574215292930603,
"learning_rate": 1.529271266578193e-05,
"loss": 0.6251,
"step": 1490
},
{
"epoch": 1.3183023872679045,
"grad_norm": 0.27575376629829407,
"learning_rate": 1.5286758946027537e-05,
"loss": 0.6349,
"step": 1491
},
{
"epoch": 1.3191865605658708,
"grad_norm": 0.26271700859069824,
"learning_rate": 1.5280802624190668e-05,
"loss": 0.6075,
"step": 1492
},
{
"epoch": 1.3200707338638373,
"grad_norm": 0.26939553022384644,
"learning_rate": 1.5274843703202954e-05,
"loss": 0.6321,
"step": 1493
},
{
"epoch": 1.3209549071618036,
"grad_norm": 0.2668761909008026,
"learning_rate": 1.5268882185997315e-05,
"loss": 0.6087,
"step": 1494
},
{
"epoch": 1.3218390804597702,
"grad_norm": 0.2569202482700348,
"learning_rate": 1.5262918075507934e-05,
"loss": 0.6199,
"step": 1495
},
{
"epoch": 1.3227232537577365,
"grad_norm": 0.25540322065353394,
"learning_rate": 1.5256951374670283e-05,
"loss": 0.616,
"step": 1496
},
{
"epoch": 1.323607427055703,
"grad_norm": 0.27239009737968445,
"learning_rate": 1.5250982086421102e-05,
"loss": 0.6415,
"step": 1497
},
{
"epoch": 1.3244916003536693,
"grad_norm": 0.2665350139141083,
"learning_rate": 1.5245010213698406e-05,
"loss": 0.6371,
"step": 1498
},
{
"epoch": 1.3253757736516358,
"grad_norm": 0.2557649314403534,
"learning_rate": 1.5239035759441487e-05,
"loss": 0.6196,
"step": 1499
},
{
"epoch": 1.3262599469496021,
"grad_norm": 0.25333184003829956,
"learning_rate": 1.5233058726590896e-05,
"loss": 0.6127,
"step": 1500
},
{
"epoch": 1.3262599469496021,
"eval_loss": 0.5751532316207886,
"eval_runtime": 635.7244,
"eval_samples_per_second": 9.488,
"eval_steps_per_second": 1.186,
"step": 1500
},
{
"epoch": 1.3271441202475684,
"grad_norm": 0.2886020243167877,
"learning_rate": 1.5227079118088468e-05,
"loss": 0.6449,
"step": 1501
},
{
"epoch": 1.328028293545535,
"grad_norm": 0.2658093571662903,
"learning_rate": 1.5221096936877293e-05,
"loss": 0.611,
"step": 1502
},
{
"epoch": 1.3289124668435013,
"grad_norm": 0.28843459486961365,
"learning_rate": 1.5215112185901737e-05,
"loss": 0.5933,
"step": 1503
},
{
"epoch": 1.3297966401414678,
"grad_norm": 0.26414918899536133,
"learning_rate": 1.5209124868107419e-05,
"loss": 0.6051,
"step": 1504
},
{
"epoch": 1.3306808134394341,
"grad_norm": 0.28977370262145996,
"learning_rate": 1.5203134986441235e-05,
"loss": 0.6142,
"step": 1505
},
{
"epoch": 1.3315649867374004,
"grad_norm": 0.26885777711868286,
"learning_rate": 1.5197142543851335e-05,
"loss": 0.6283,
"step": 1506
},
{
"epoch": 1.332449160035367,
"grad_norm": 0.2773381471633911,
"learning_rate": 1.5191147543287128e-05,
"loss": 0.6294,
"step": 1507
},
{
"epoch": 1.3333333333333333,
"grad_norm": 0.2757658064365387,
"learning_rate": 1.5185149987699287e-05,
"loss": 0.6256,
"step": 1508
},
{
"epoch": 1.3342175066312998,
"grad_norm": 0.27311399579048157,
"learning_rate": 1.5179149880039745e-05,
"loss": 0.5989,
"step": 1509
},
{
"epoch": 1.335101679929266,
"grad_norm": 0.2559478282928467,
"learning_rate": 1.5173147223261677e-05,
"loss": 0.6113,
"step": 1510
},
{
"epoch": 1.3359858532272324,
"grad_norm": 0.2946409583091736,
"learning_rate": 1.516714202031953e-05,
"loss": 0.6078,
"step": 1511
},
{
"epoch": 1.336870026525199,
"grad_norm": 0.2523629069328308,
"learning_rate": 1.5161134274168994e-05,
"loss": 0.6321,
"step": 1512
},
{
"epoch": 1.3377541998231655,
"grad_norm": 0.2657115161418915,
"learning_rate": 1.5155123987767008e-05,
"loss": 0.6359,
"step": 1513
},
{
"epoch": 1.3386383731211318,
"grad_norm": 0.27508923411369324,
"learning_rate": 1.514911116407178e-05,
"loss": 0.6174,
"step": 1514
},
{
"epoch": 1.339522546419098,
"grad_norm": 0.2615225911140442,
"learning_rate": 1.5143095806042738e-05,
"loss": 0.6133,
"step": 1515
},
{
"epoch": 1.3404067197170646,
"grad_norm": 0.25328344106674194,
"learning_rate": 1.5137077916640582e-05,
"loss": 0.6206,
"step": 1516
},
{
"epoch": 1.341290893015031,
"grad_norm": 0.2706657648086548,
"learning_rate": 1.5131057498827244e-05,
"loss": 0.6387,
"step": 1517
},
{
"epoch": 1.3421750663129974,
"grad_norm": 0.26478448510169983,
"learning_rate": 1.5125034555565908e-05,
"loss": 0.6312,
"step": 1518
},
{
"epoch": 1.3430592396109637,
"grad_norm": 0.2657945454120636,
"learning_rate": 1.5119009089820997e-05,
"loss": 0.61,
"step": 1519
},
{
"epoch": 1.34394341290893,
"grad_norm": 0.2631399929523468,
"learning_rate": 1.5112981104558176e-05,
"loss": 0.6315,
"step": 1520
},
{
"epoch": 1.3448275862068966,
"grad_norm": 0.27646076679229736,
"learning_rate": 1.510695060274435e-05,
"loss": 0.6283,
"step": 1521
},
{
"epoch": 1.3457117595048629,
"grad_norm": 0.2719420790672302,
"learning_rate": 1.5100917587347668e-05,
"loss": 0.6191,
"step": 1522
},
{
"epoch": 1.3465959328028294,
"grad_norm": 0.2722283899784088,
"learning_rate": 1.5094882061337504e-05,
"loss": 0.6286,
"step": 1523
},
{
"epoch": 1.3474801061007957,
"grad_norm": 0.27759474515914917,
"learning_rate": 1.5088844027684481e-05,
"loss": 0.6167,
"step": 1524
},
{
"epoch": 1.348364279398762,
"grad_norm": 0.2806648015975952,
"learning_rate": 1.5082803489360446e-05,
"loss": 0.6772,
"step": 1525
},
{
"epoch": 1.3492484526967286,
"grad_norm": 0.2694559693336487,
"learning_rate": 1.5076760449338488e-05,
"loss": 0.6204,
"step": 1526
},
{
"epoch": 1.350132625994695,
"grad_norm": 0.25901177525520325,
"learning_rate": 1.5070714910592917e-05,
"loss": 0.5965,
"step": 1527
},
{
"epoch": 1.3510167992926614,
"grad_norm": 0.2583364248275757,
"learning_rate": 1.5064666876099281e-05,
"loss": 0.611,
"step": 1528
},
{
"epoch": 1.3519009725906277,
"grad_norm": 0.2742071747779846,
"learning_rate": 1.5058616348834353e-05,
"loss": 0.6494,
"step": 1529
},
{
"epoch": 1.3527851458885942,
"grad_norm": 0.2731901705265045,
"learning_rate": 1.5052563331776137e-05,
"loss": 0.6051,
"step": 1530
},
{
"epoch": 1.3536693191865605,
"grad_norm": 0.263033002614975,
"learning_rate": 1.5046507827903851e-05,
"loss": 0.6344,
"step": 1531
},
{
"epoch": 1.354553492484527,
"grad_norm": 0.2740442454814911,
"learning_rate": 1.5040449840197955e-05,
"loss": 0.6267,
"step": 1532
},
{
"epoch": 1.3554376657824934,
"grad_norm": 0.2606651782989502,
"learning_rate": 1.5034389371640115e-05,
"loss": 0.6358,
"step": 1533
},
{
"epoch": 1.3563218390804597,
"grad_norm": 0.26348283886909485,
"learning_rate": 1.5028326425213222e-05,
"loss": 0.6338,
"step": 1534
},
{
"epoch": 1.3572060123784262,
"grad_norm": 0.278915137052536,
"learning_rate": 1.50222610039014e-05,
"loss": 0.6127,
"step": 1535
},
{
"epoch": 1.3580901856763925,
"grad_norm": 0.25526949763298035,
"learning_rate": 1.5016193110689967e-05,
"loss": 0.6279,
"step": 1536
},
{
"epoch": 1.358974358974359,
"grad_norm": 0.27784663438796997,
"learning_rate": 1.5010122748565481e-05,
"loss": 0.6603,
"step": 1537
},
{
"epoch": 1.3598585322723253,
"grad_norm": 0.25813254714012146,
"learning_rate": 1.5004049920515694e-05,
"loss": 0.6278,
"step": 1538
},
{
"epoch": 1.3607427055702916,
"grad_norm": 0.25734275579452515,
"learning_rate": 1.49979746295296e-05,
"loss": 0.6154,
"step": 1539
},
{
"epoch": 1.3616268788682582,
"grad_norm": 0.2655569016933441,
"learning_rate": 1.4991896878597371e-05,
"loss": 0.6253,
"step": 1540
},
{
"epoch": 1.3625110521662247,
"grad_norm": 0.27318719029426575,
"learning_rate": 1.4985816670710415e-05,
"loss": 0.6055,
"step": 1541
},
{
"epoch": 1.363395225464191,
"grad_norm": 0.2607451379299164,
"learning_rate": 1.4979734008861341e-05,
"loss": 0.6196,
"step": 1542
},
{
"epoch": 1.3642793987621573,
"grad_norm": 0.25516003370285034,
"learning_rate": 1.4973648896043964e-05,
"loss": 0.6174,
"step": 1543
},
{
"epoch": 1.3651635720601238,
"grad_norm": 0.24676798284053802,
"learning_rate": 1.4967561335253307e-05,
"loss": 0.6049,
"step": 1544
},
{
"epoch": 1.3660477453580901,
"grad_norm": 0.25608208775520325,
"learning_rate": 1.49614713294856e-05,
"loss": 0.634,
"step": 1545
},
{
"epoch": 1.3669319186560567,
"grad_norm": 0.26682937145233154,
"learning_rate": 1.495537888173827e-05,
"loss": 0.6236,
"step": 1546
},
{
"epoch": 1.367816091954023,
"grad_norm": 0.2559511959552765,
"learning_rate": 1.4949283995009953e-05,
"loss": 0.6338,
"step": 1547
},
{
"epoch": 1.3687002652519893,
"grad_norm": 0.27806738018989563,
"learning_rate": 1.4943186672300485e-05,
"loss": 0.6761,
"step": 1548
},
{
"epoch": 1.3695844385499558,
"grad_norm": 0.2676104009151459,
"learning_rate": 1.4937086916610895e-05,
"loss": 0.6475,
"step": 1549
},
{
"epoch": 1.3704686118479221,
"grad_norm": 0.2742098271846771,
"learning_rate": 1.4930984730943417e-05,
"loss": 0.6197,
"step": 1550
},
{
"epoch": 1.3713527851458887,
"grad_norm": 0.25919288396835327,
"learning_rate": 1.4924880118301475e-05,
"loss": 0.6267,
"step": 1551
},
{
"epoch": 1.372236958443855,
"grad_norm": 0.2618049681186676,
"learning_rate": 1.4918773081689688e-05,
"loss": 0.6285,
"step": 1552
},
{
"epoch": 1.3731211317418213,
"grad_norm": 0.2592207193374634,
"learning_rate": 1.4912663624113871e-05,
"loss": 0.6238,
"step": 1553
},
{
"epoch": 1.3740053050397878,
"grad_norm": 0.2704594135284424,
"learning_rate": 1.490655174858103e-05,
"loss": 0.6212,
"step": 1554
},
{
"epoch": 1.3748894783377543,
"grad_norm": 0.25088632106781006,
"learning_rate": 1.4900437458099364e-05,
"loss": 0.6322,
"step": 1555
},
{
"epoch": 1.3757736516357206,
"grad_norm": 0.2540808618068695,
"learning_rate": 1.489432075567825e-05,
"loss": 0.6329,
"step": 1556
},
{
"epoch": 1.376657824933687,
"grad_norm": 0.26743555068969727,
"learning_rate": 1.4888201644328265e-05,
"loss": 0.6182,
"step": 1557
},
{
"epoch": 1.3775419982316535,
"grad_norm": 0.26381686329841614,
"learning_rate": 1.4882080127061162e-05,
"loss": 0.616,
"step": 1558
},
{
"epoch": 1.3784261715296198,
"grad_norm": 0.2577098608016968,
"learning_rate": 1.4875956206889885e-05,
"loss": 0.6193,
"step": 1559
},
{
"epoch": 1.3793103448275863,
"grad_norm": 0.2623966932296753,
"learning_rate": 1.4869829886828555e-05,
"loss": 0.6349,
"step": 1560
},
{
"epoch": 1.3801945181255526,
"grad_norm": 0.27270275354385376,
"learning_rate": 1.4863701169892477e-05,
"loss": 0.6445,
"step": 1561
},
{
"epoch": 1.381078691423519,
"grad_norm": 0.25518476963043213,
"learning_rate": 1.4857570059098139e-05,
"loss": 0.6195,
"step": 1562
},
{
"epoch": 1.3819628647214854,
"grad_norm": 0.24892447888851166,
"learning_rate": 1.48514365574632e-05,
"loss": 0.6013,
"step": 1563
},
{
"epoch": 1.3828470380194517,
"grad_norm": 0.2676941156387329,
"learning_rate": 1.4845300668006501e-05,
"loss": 0.6099,
"step": 1564
},
{
"epoch": 1.3837312113174183,
"grad_norm": 0.2486124187707901,
"learning_rate": 1.4839162393748056e-05,
"loss": 0.5988,
"step": 1565
},
{
"epoch": 1.3846153846153846,
"grad_norm": 0.26621371507644653,
"learning_rate": 1.4833021737709059e-05,
"loss": 0.6444,
"step": 1566
},
{
"epoch": 1.3854995579133509,
"grad_norm": 0.262096643447876,
"learning_rate": 1.4826878702911862e-05,
"loss": 0.6324,
"step": 1567
},
{
"epoch": 1.3863837312113174,
"grad_norm": 0.25397616624832153,
"learning_rate": 1.4820733292380002e-05,
"loss": 0.6127,
"step": 1568
},
{
"epoch": 1.387267904509284,
"grad_norm": 0.2548450827598572,
"learning_rate": 1.481458550913818e-05,
"loss": 0.6425,
"step": 1569
},
{
"epoch": 1.3881520778072503,
"grad_norm": 0.25873392820358276,
"learning_rate": 1.4808435356212263e-05,
"loss": 0.5939,
"step": 1570
},
{
"epoch": 1.3890362511052166,
"grad_norm": 0.24395067989826202,
"learning_rate": 1.4802282836629288e-05,
"loss": 0.6089,
"step": 1571
},
{
"epoch": 1.389920424403183,
"grad_norm": 0.247545063495636,
"learning_rate": 1.4796127953417454e-05,
"loss": 0.5942,
"step": 1572
},
{
"epoch": 1.3908045977011494,
"grad_norm": 0.2622489929199219,
"learning_rate": 1.4789970709606124e-05,
"loss": 0.6211,
"step": 1573
},
{
"epoch": 1.391688770999116,
"grad_norm": 0.25747135281562805,
"learning_rate": 1.4783811108225822e-05,
"loss": 0.6489,
"step": 1574
},
{
"epoch": 1.3925729442970822,
"grad_norm": 0.258484423160553,
"learning_rate": 1.477764915230824e-05,
"loss": 0.6244,
"step": 1575
},
{
"epoch": 1.3934571175950485,
"grad_norm": 0.2539711892604828,
"learning_rate": 1.4771484844886215e-05,
"loss": 0.6397,
"step": 1576
},
{
"epoch": 1.394341290893015,
"grad_norm": 0.261782169342041,
"learning_rate": 1.4765318188993749e-05,
"loss": 0.6197,
"step": 1577
},
{
"epoch": 1.3952254641909814,
"grad_norm": 0.24321092665195465,
"learning_rate": 1.4759149187666007e-05,
"loss": 0.6281,
"step": 1578
},
{
"epoch": 1.396109637488948,
"grad_norm": 0.2606092393398285,
"learning_rate": 1.4752977843939293e-05,
"loss": 0.6299,
"step": 1579
},
{
"epoch": 1.3969938107869142,
"grad_norm": 0.25254756212234497,
"learning_rate": 1.4746804160851076e-05,
"loss": 0.6244,
"step": 1580
},
{
"epoch": 1.3978779840848805,
"grad_norm": 0.23873089253902435,
"learning_rate": 1.4740628141439973e-05,
"loss": 0.5912,
"step": 1581
},
{
"epoch": 1.398762157382847,
"grad_norm": 0.2505509853363037,
"learning_rate": 1.4734449788745745e-05,
"loss": 0.6064,
"step": 1582
},
{
"epoch": 1.3996463306808136,
"grad_norm": 0.2468908280134201,
"learning_rate": 1.4728269105809312e-05,
"loss": 0.6374,
"step": 1583
},
{
"epoch": 1.4005305039787799,
"grad_norm": 0.28137269616127014,
"learning_rate": 1.4722086095672738e-05,
"loss": 0.6289,
"step": 1584
},
{
"epoch": 1.4014146772767462,
"grad_norm": 0.2620140016078949,
"learning_rate": 1.4715900761379225e-05,
"loss": 0.6124,
"step": 1585
},
{
"epoch": 1.4022988505747127,
"grad_norm": 0.2471739649772644,
"learning_rate": 1.4709713105973125e-05,
"loss": 0.6069,
"step": 1586
},
{
"epoch": 1.403183023872679,
"grad_norm": 0.2605185806751251,
"learning_rate": 1.4703523132499933e-05,
"loss": 0.6153,
"step": 1587
},
{
"epoch": 1.4040671971706455,
"grad_norm": 0.2583577632904053,
"learning_rate": 1.4697330844006281e-05,
"loss": 0.6192,
"step": 1588
},
{
"epoch": 1.4049513704686118,
"grad_norm": 0.2621200382709503,
"learning_rate": 1.4691136243539946e-05,
"loss": 0.6265,
"step": 1589
},
{
"epoch": 1.4058355437665782,
"grad_norm": 0.2573765218257904,
"learning_rate": 1.4684939334149835e-05,
"loss": 0.6142,
"step": 1590
},
{
"epoch": 1.4067197170645447,
"grad_norm": 0.2725617289543152,
"learning_rate": 1.4678740118886004e-05,
"loss": 0.6036,
"step": 1591
},
{
"epoch": 1.407603890362511,
"grad_norm": 0.26118481159210205,
"learning_rate": 1.4672538600799627e-05,
"loss": 0.6413,
"step": 1592
},
{
"epoch": 1.4084880636604775,
"grad_norm": 0.273715078830719,
"learning_rate": 1.4666334782943026e-05,
"loss": 0.6444,
"step": 1593
},
{
"epoch": 1.4093722369584438,
"grad_norm": 0.25127696990966797,
"learning_rate": 1.4660128668369647e-05,
"loss": 0.6285,
"step": 1594
},
{
"epoch": 1.4102564102564101,
"grad_norm": 0.267498254776001,
"learning_rate": 1.465392026013407e-05,
"loss": 0.6056,
"step": 1595
},
{
"epoch": 1.4111405835543767,
"grad_norm": 0.2602406442165375,
"learning_rate": 1.4647709561292003e-05,
"loss": 0.6157,
"step": 1596
},
{
"epoch": 1.4120247568523432,
"grad_norm": 0.269535630941391,
"learning_rate": 1.4641496574900285e-05,
"loss": 0.6445,
"step": 1597
},
{
"epoch": 1.4129089301503095,
"grad_norm": 0.260507196187973,
"learning_rate": 1.4635281304016866e-05,
"loss": 0.6295,
"step": 1598
},
{
"epoch": 1.4137931034482758,
"grad_norm": 0.25738662481307983,
"learning_rate": 1.4629063751700843e-05,
"loss": 0.6131,
"step": 1599
},
{
"epoch": 1.4146772767462423,
"grad_norm": 0.2653326392173767,
"learning_rate": 1.4622843921012418e-05,
"loss": 0.6164,
"step": 1600
},
{
"epoch": 1.4155614500442086,
"grad_norm": 0.2698444724082947,
"learning_rate": 1.4616621815012924e-05,
"loss": 0.6323,
"step": 1601
},
{
"epoch": 1.4164456233421752,
"grad_norm": 0.2569431960582733,
"learning_rate": 1.4610397436764808e-05,
"loss": 0.5926,
"step": 1602
},
{
"epoch": 1.4173297966401415,
"grad_norm": 0.2506994307041168,
"learning_rate": 1.4604170789331638e-05,
"loss": 0.6141,
"step": 1603
},
{
"epoch": 1.4182139699381078,
"grad_norm": 0.27625998854637146,
"learning_rate": 1.4597941875778101e-05,
"loss": 0.6178,
"step": 1604
},
{
"epoch": 1.4190981432360743,
"grad_norm": 0.257918119430542,
"learning_rate": 1.4591710699169994e-05,
"loss": 0.6186,
"step": 1605
},
{
"epoch": 1.4199823165340406,
"grad_norm": 0.2526085376739502,
"learning_rate": 1.4585477262574237e-05,
"loss": 0.5999,
"step": 1606
},
{
"epoch": 1.4208664898320071,
"grad_norm": 0.26778221130371094,
"learning_rate": 1.4579241569058851e-05,
"loss": 0.6512,
"step": 1607
},
{
"epoch": 1.4217506631299734,
"grad_norm": 0.27390381693840027,
"learning_rate": 1.4573003621692977e-05,
"loss": 0.6328,
"step": 1608
},
{
"epoch": 1.4226348364279398,
"grad_norm": 0.26946359872817993,
"learning_rate": 1.4566763423546856e-05,
"loss": 0.6257,
"step": 1609
},
{
"epoch": 1.4235190097259063,
"grad_norm": 0.2822984457015991,
"learning_rate": 1.4560520977691845e-05,
"loss": 0.612,
"step": 1610
},
{
"epoch": 1.4244031830238728,
"grad_norm": 0.28152552247047424,
"learning_rate": 1.4554276287200414e-05,
"loss": 0.6085,
"step": 1611
},
{
"epoch": 1.4252873563218391,
"grad_norm": 0.2824934422969818,
"learning_rate": 1.454802935514611e-05,
"loss": 0.6268,
"step": 1612
},
{
"epoch": 1.4261715296198054,
"grad_norm": 0.2584853172302246,
"learning_rate": 1.4541780184603615e-05,
"loss": 0.6171,
"step": 1613
},
{
"epoch": 1.427055702917772,
"grad_norm": 0.27463647723197937,
"learning_rate": 1.45355287786487e-05,
"loss": 0.6409,
"step": 1614
},
{
"epoch": 1.4279398762157383,
"grad_norm": 0.27769041061401367,
"learning_rate": 1.4529275140358228e-05,
"loss": 0.6315,
"step": 1615
},
{
"epoch": 1.4288240495137048,
"grad_norm": 0.2601848840713501,
"learning_rate": 1.4523019272810174e-05,
"loss": 0.6253,
"step": 1616
},
{
"epoch": 1.429708222811671,
"grad_norm": 0.2694016993045807,
"learning_rate": 1.4516761179083603e-05,
"loss": 0.5967,
"step": 1617
},
{
"epoch": 1.4305923961096374,
"grad_norm": 0.25726568698883057,
"learning_rate": 1.4510500862258679e-05,
"loss": 0.6337,
"step": 1618
},
{
"epoch": 1.431476569407604,
"grad_norm": 0.2654750347137451,
"learning_rate": 1.4504238325416653e-05,
"loss": 0.606,
"step": 1619
},
{
"epoch": 1.4323607427055702,
"grad_norm": 0.2612236440181732,
"learning_rate": 1.4497973571639883e-05,
"loss": 0.6291,
"step": 1620
},
{
"epoch": 1.4332449160035368,
"grad_norm": 0.2687664330005646,
"learning_rate": 1.4491706604011802e-05,
"loss": 0.6087,
"step": 1621
},
{
"epoch": 1.434129089301503,
"grad_norm": 0.2685811221599579,
"learning_rate": 1.4485437425616942e-05,
"loss": 0.6174,
"step": 1622
},
{
"epoch": 1.4350132625994694,
"grad_norm": 0.2548612058162689,
"learning_rate": 1.4479166039540922e-05,
"loss": 0.5973,
"step": 1623
},
{
"epoch": 1.435897435897436,
"grad_norm": 0.2613467574119568,
"learning_rate": 1.447289244887045e-05,
"loss": 0.6021,
"step": 1624
},
{
"epoch": 1.4367816091954024,
"grad_norm": 0.27904170751571655,
"learning_rate": 1.4466616656693309e-05,
"loss": 0.6436,
"step": 1625
},
{
"epoch": 1.4376657824933687,
"grad_norm": 0.2801571190357208,
"learning_rate": 1.4460338666098377e-05,
"loss": 0.6233,
"step": 1626
},
{
"epoch": 1.438549955791335,
"grad_norm": 0.26581257581710815,
"learning_rate": 1.4454058480175608e-05,
"loss": 0.6076,
"step": 1627
},
{
"epoch": 1.4394341290893016,
"grad_norm": 0.2661403715610504,
"learning_rate": 1.4447776102016038e-05,
"loss": 0.6374,
"step": 1628
},
{
"epoch": 1.4403183023872679,
"grad_norm": 0.27372390031814575,
"learning_rate": 1.4441491534711782e-05,
"loss": 0.6121,
"step": 1629
},
{
"epoch": 1.4412024756852344,
"grad_norm": 0.2639310359954834,
"learning_rate": 1.4435204781356031e-05,
"loss": 0.6167,
"step": 1630
},
{
"epoch": 1.4420866489832007,
"grad_norm": 0.27721524238586426,
"learning_rate": 1.4428915845043056e-05,
"loss": 0.6065,
"step": 1631
},
{
"epoch": 1.442970822281167,
"grad_norm": 0.26838260889053345,
"learning_rate": 1.4422624728868197e-05,
"loss": 0.6153,
"step": 1632
},
{
"epoch": 1.4438549955791335,
"grad_norm": 0.28284192085266113,
"learning_rate": 1.4416331435927876e-05,
"loss": 0.6125,
"step": 1633
},
{
"epoch": 1.4447391688770999,
"grad_norm": 0.2547760307788849,
"learning_rate": 1.441003596931957e-05,
"loss": 0.592,
"step": 1634
},
{
"epoch": 1.4456233421750664,
"grad_norm": 0.3029026687145233,
"learning_rate": 1.440373833214184e-05,
"loss": 0.6091,
"step": 1635
},
{
"epoch": 1.4465075154730327,
"grad_norm": 0.25786957144737244,
"learning_rate": 1.4397438527494316e-05,
"loss": 0.6183,
"step": 1636
},
{
"epoch": 1.447391688770999,
"grad_norm": 0.32107457518577576,
"learning_rate": 1.4391136558477686e-05,
"loss": 0.6146,
"step": 1637
},
{
"epoch": 1.4482758620689655,
"grad_norm": 0.2513173818588257,
"learning_rate": 1.4384832428193704e-05,
"loss": 0.6224,
"step": 1638
},
{
"epoch": 1.449160035366932,
"grad_norm": 0.3228299915790558,
"learning_rate": 1.4378526139745197e-05,
"loss": 0.6163,
"step": 1639
},
{
"epoch": 1.4500442086648984,
"grad_norm": 0.2520953118801117,
"learning_rate": 1.4372217696236045e-05,
"loss": 0.6127,
"step": 1640
},
{
"epoch": 1.4509283819628647,
"grad_norm": 0.2907947599887848,
"learning_rate": 1.4365907100771193e-05,
"loss": 0.612,
"step": 1641
},
{
"epoch": 1.4518125552608312,
"grad_norm": 0.27866512537002563,
"learning_rate": 1.4359594356456646e-05,
"loss": 0.6091,
"step": 1642
},
{
"epoch": 1.4526967285587975,
"grad_norm": 0.26458799839019775,
"learning_rate": 1.4353279466399461e-05,
"loss": 0.61,
"step": 1643
},
{
"epoch": 1.453580901856764,
"grad_norm": 0.28184905648231506,
"learning_rate": 1.4346962433707757e-05,
"loss": 0.6078,
"step": 1644
},
{
"epoch": 1.4544650751547303,
"grad_norm": 0.26177701354026794,
"learning_rate": 1.4340643261490703e-05,
"loss": 0.5962,
"step": 1645
},
{
"epoch": 1.4553492484526966,
"grad_norm": 0.28080907464027405,
"learning_rate": 1.4334321952858528e-05,
"loss": 0.5991,
"step": 1646
},
{
"epoch": 1.4562334217506632,
"grad_norm": 0.26058095693588257,
"learning_rate": 1.4327998510922505e-05,
"loss": 0.6185,
"step": 1647
},
{
"epoch": 1.4571175950486295,
"grad_norm": 0.27849799394607544,
"learning_rate": 1.4321672938794958e-05,
"loss": 0.606,
"step": 1648
},
{
"epoch": 1.458001768346596,
"grad_norm": 0.2471344769001007,
"learning_rate": 1.4315345239589266e-05,
"loss": 0.6291,
"step": 1649
},
{
"epoch": 1.4588859416445623,
"grad_norm": 0.2636317312717438,
"learning_rate": 1.4309015416419851e-05,
"loss": 0.6272,
"step": 1650
},
{
"epoch": 1.4597701149425286,
"grad_norm": 0.25596165657043457,
"learning_rate": 1.4302683472402175e-05,
"loss": 0.6163,
"step": 1651
},
{
"epoch": 1.4606542882404951,
"grad_norm": 0.25492262840270996,
"learning_rate": 1.4296349410652752e-05,
"loss": 0.6096,
"step": 1652
},
{
"epoch": 1.4615384615384617,
"grad_norm": 0.2608563005924225,
"learning_rate": 1.4290013234289133e-05,
"loss": 0.5977,
"step": 1653
},
{
"epoch": 1.462422634836428,
"grad_norm": 0.2614995241165161,
"learning_rate": 1.4283674946429913e-05,
"loss": 0.6208,
"step": 1654
},
{
"epoch": 1.4633068081343943,
"grad_norm": 0.27210062742233276,
"learning_rate": 1.4277334550194724e-05,
"loss": 0.6416,
"step": 1655
},
{
"epoch": 1.4641909814323608,
"grad_norm": 0.2666453421115875,
"learning_rate": 1.427099204870424e-05,
"loss": 0.6284,
"step": 1656
},
{
"epoch": 1.4650751547303271,
"grad_norm": 0.26994219422340393,
"learning_rate": 1.426464744508016e-05,
"loss": 0.623,
"step": 1657
},
{
"epoch": 1.4659593280282937,
"grad_norm": 0.2598974108695984,
"learning_rate": 1.4258300742445234e-05,
"loss": 0.6228,
"step": 1658
},
{
"epoch": 1.46684350132626,
"grad_norm": 0.26208171248435974,
"learning_rate": 1.4251951943923233e-05,
"loss": 0.6133,
"step": 1659
},
{
"epoch": 1.4677276746242263,
"grad_norm": 0.25028327107429504,
"learning_rate": 1.4245601052638967e-05,
"loss": 0.6259,
"step": 1660
},
{
"epoch": 1.4686118479221928,
"grad_norm": 0.2482742965221405,
"learning_rate": 1.4239248071718267e-05,
"loss": 0.6313,
"step": 1661
},
{
"epoch": 1.469496021220159,
"grad_norm": 0.2767847180366516,
"learning_rate": 1.4232893004287997e-05,
"loss": 0.632,
"step": 1662
},
{
"epoch": 1.4703801945181256,
"grad_norm": 0.25260740518569946,
"learning_rate": 1.4226535853476055e-05,
"loss": 0.6118,
"step": 1663
},
{
"epoch": 1.471264367816092,
"grad_norm": 0.2624533474445343,
"learning_rate": 1.4220176622411354e-05,
"loss": 0.616,
"step": 1664
},
{
"epoch": 1.4721485411140582,
"grad_norm": 0.2430107295513153,
"learning_rate": 1.4213815314223838e-05,
"loss": 0.5636,
"step": 1665
},
{
"epoch": 1.4730327144120248,
"grad_norm": 0.25626128911972046,
"learning_rate": 1.4207451932044465e-05,
"loss": 0.6145,
"step": 1666
},
{
"epoch": 1.4739168877099913,
"grad_norm": 0.2427784651517868,
"learning_rate": 1.4201086479005226e-05,
"loss": 0.5934,
"step": 1667
},
{
"epoch": 1.4748010610079576,
"grad_norm": 0.25667572021484375,
"learning_rate": 1.4194718958239116e-05,
"loss": 0.6233,
"step": 1668
},
{
"epoch": 1.475685234305924,
"grad_norm": 0.2500772774219513,
"learning_rate": 1.4188349372880166e-05,
"loss": 0.6225,
"step": 1669
},
{
"epoch": 1.4765694076038904,
"grad_norm": 0.24545742571353912,
"learning_rate": 1.4181977726063403e-05,
"loss": 0.6185,
"step": 1670
},
{
"epoch": 1.4774535809018567,
"grad_norm": 0.24441321194171906,
"learning_rate": 1.4175604020924886e-05,
"loss": 0.6046,
"step": 1671
},
{
"epoch": 1.4783377541998233,
"grad_norm": 0.2690092921257019,
"learning_rate": 1.416922826060168e-05,
"loss": 0.6257,
"step": 1672
},
{
"epoch": 1.4792219274977896,
"grad_norm": 0.24547670781612396,
"learning_rate": 1.4162850448231858e-05,
"loss": 0.6257,
"step": 1673
},
{
"epoch": 1.4801061007957559,
"grad_norm": 0.2690432667732239,
"learning_rate": 1.4156470586954507e-05,
"loss": 0.6279,
"step": 1674
},
{
"epoch": 1.4809902740937224,
"grad_norm": 0.26179981231689453,
"learning_rate": 1.4150088679909724e-05,
"loss": 0.6202,
"step": 1675
},
{
"epoch": 1.4818744473916887,
"grad_norm": 0.24629031121730804,
"learning_rate": 1.4143704730238612e-05,
"loss": 0.5924,
"step": 1676
},
{
"epoch": 1.4827586206896552,
"grad_norm": 0.27296724915504456,
"learning_rate": 1.4137318741083273e-05,
"loss": 0.6053,
"step": 1677
},
{
"epoch": 1.4836427939876216,
"grad_norm": 0.2660140097141266,
"learning_rate": 1.4130930715586823e-05,
"loss": 0.6046,
"step": 1678
},
{
"epoch": 1.4845269672855879,
"grad_norm": 0.2554752230644226,
"learning_rate": 1.4124540656893376e-05,
"loss": 0.6131,
"step": 1679
},
{
"epoch": 1.4854111405835544,
"grad_norm": 0.26546183228492737,
"learning_rate": 1.4118148568148038e-05,
"loss": 0.6289,
"step": 1680
},
{
"epoch": 1.4862953138815207,
"grad_norm": 0.25153717398643494,
"learning_rate": 1.4111754452496934e-05,
"loss": 0.5914,
"step": 1681
},
{
"epoch": 1.4871794871794872,
"grad_norm": 0.27413830161094666,
"learning_rate": 1.4105358313087166e-05,
"loss": 0.6503,
"step": 1682
},
{
"epoch": 1.4880636604774535,
"grad_norm": 0.25708523392677307,
"learning_rate": 1.4098960153066843e-05,
"loss": 0.6042,
"step": 1683
},
{
"epoch": 1.48894783377542,
"grad_norm": 0.2703273892402649,
"learning_rate": 1.4092559975585065e-05,
"loss": 0.6584,
"step": 1684
},
{
"epoch": 1.4898320070733864,
"grad_norm": 0.2573475241661072,
"learning_rate": 1.4086157783791929e-05,
"loss": 0.6214,
"step": 1685
},
{
"epoch": 1.490716180371353,
"grad_norm": 0.2578927278518677,
"learning_rate": 1.4079753580838524e-05,
"loss": 0.6058,
"step": 1686
},
{
"epoch": 1.4916003536693192,
"grad_norm": 0.24975483119487762,
"learning_rate": 1.407334736987692e-05,
"loss": 0.6159,
"step": 1687
},
{
"epoch": 1.4924845269672855,
"grad_norm": 0.25338032841682434,
"learning_rate": 1.406693915406018e-05,
"loss": 0.6312,
"step": 1688
},
{
"epoch": 1.493368700265252,
"grad_norm": 0.25621160864830017,
"learning_rate": 1.4060528936542363e-05,
"loss": 0.6354,
"step": 1689
},
{
"epoch": 1.4942528735632183,
"grad_norm": 0.24692730605602264,
"learning_rate": 1.4054116720478495e-05,
"loss": 0.6225,
"step": 1690
},
{
"epoch": 1.4951370468611849,
"grad_norm": 0.263579398393631,
"learning_rate": 1.40477025090246e-05,
"loss": 0.633,
"step": 1691
},
{
"epoch": 1.4960212201591512,
"grad_norm": 0.24871686100959778,
"learning_rate": 1.4041286305337685e-05,
"loss": 0.5982,
"step": 1692
},
{
"epoch": 1.4969053934571175,
"grad_norm": 0.25688108801841736,
"learning_rate": 1.4034868112575723e-05,
"loss": 0.6133,
"step": 1693
},
{
"epoch": 1.497789566755084,
"grad_norm": 0.2521092891693115,
"learning_rate": 1.4028447933897682e-05,
"loss": 0.5908,
"step": 1694
},
{
"epoch": 1.4986737400530503,
"grad_norm": 0.25001269578933716,
"learning_rate": 1.4022025772463494e-05,
"loss": 0.6018,
"step": 1695
},
{
"epoch": 1.4995579133510168,
"grad_norm": 0.2610168159008026,
"learning_rate": 1.4015601631434083e-05,
"loss": 0.6159,
"step": 1696
},
{
"epoch": 1.5004420866489832,
"grad_norm": 0.2772304117679596,
"learning_rate": 1.400917551397133e-05,
"loss": 0.6201,
"step": 1697
},
{
"epoch": 1.5013262599469495,
"grad_norm": 0.2558181583881378,
"learning_rate": 1.40027474232381e-05,
"loss": 0.6209,
"step": 1698
},
{
"epoch": 1.502210433244916,
"grad_norm": 0.2670685052871704,
"learning_rate": 1.3996317362398229e-05,
"loss": 0.6246,
"step": 1699
},
{
"epoch": 1.5030946065428825,
"grad_norm": 0.2649582326412201,
"learning_rate": 1.398988533461651e-05,
"loss": 0.6294,
"step": 1700
},
{
"epoch": 1.5039787798408488,
"grad_norm": 0.2683772146701813,
"learning_rate": 1.3983451343058725e-05,
"loss": 0.6038,
"step": 1701
},
{
"epoch": 1.5048629531388151,
"grad_norm": 0.27120858430862427,
"learning_rate": 1.3977015390891607e-05,
"loss": 0.6299,
"step": 1702
},
{
"epoch": 1.5057471264367817,
"grad_norm": 0.2734665870666504,
"learning_rate": 1.3970577481282854e-05,
"loss": 0.6133,
"step": 1703
},
{
"epoch": 1.506631299734748,
"grad_norm": 0.2701476216316223,
"learning_rate": 1.3964137617401135e-05,
"loss": 0.585,
"step": 1704
},
{
"epoch": 1.5075154730327145,
"grad_norm": 0.26021963357925415,
"learning_rate": 1.3957695802416083e-05,
"loss": 0.6247,
"step": 1705
},
{
"epoch": 1.5083996463306808,
"grad_norm": 0.2701626718044281,
"learning_rate": 1.3951252039498275e-05,
"loss": 0.6145,
"step": 1706
},
{
"epoch": 1.509283819628647,
"grad_norm": 0.25433680415153503,
"learning_rate": 1.3944806331819266e-05,
"loss": 0.6192,
"step": 1707
},
{
"epoch": 1.5101679929266136,
"grad_norm": 0.26368677616119385,
"learning_rate": 1.3938358682551557e-05,
"loss": 0.6454,
"step": 1708
},
{
"epoch": 1.5110521662245802,
"grad_norm": 0.2534036934375763,
"learning_rate": 1.3931909094868607e-05,
"loss": 0.6425,
"step": 1709
},
{
"epoch": 1.5119363395225465,
"grad_norm": 0.2661418616771698,
"learning_rate": 1.392545757194483e-05,
"loss": 0.6127,
"step": 1710
},
{
"epoch": 1.5128205128205128,
"grad_norm": 0.2524895668029785,
"learning_rate": 1.3919004116955593e-05,
"loss": 0.6032,
"step": 1711
},
{
"epoch": 1.513704686118479,
"grad_norm": 0.2640649080276489,
"learning_rate": 1.3912548733077213e-05,
"loss": 0.6286,
"step": 1712
},
{
"epoch": 1.5145888594164456,
"grad_norm": 0.2641122341156006,
"learning_rate": 1.3906091423486952e-05,
"loss": 0.6216,
"step": 1713
},
{
"epoch": 1.5154730327144121,
"grad_norm": 0.2638670802116394,
"learning_rate": 1.3899632191363031e-05,
"loss": 0.6056,
"step": 1714
},
{
"epoch": 1.5163572060123784,
"grad_norm": 0.2941979467868805,
"learning_rate": 1.3893171039884608e-05,
"loss": 0.5919,
"step": 1715
},
{
"epoch": 1.5172413793103448,
"grad_norm": 0.2603466212749481,
"learning_rate": 1.3886707972231786e-05,
"loss": 0.6153,
"step": 1716
},
{
"epoch": 1.5181255526083113,
"grad_norm": 0.2572587728500366,
"learning_rate": 1.3880242991585611e-05,
"loss": 0.6154,
"step": 1717
},
{
"epoch": 1.5190097259062776,
"grad_norm": 0.2507724165916443,
"learning_rate": 1.387377610112808e-05,
"loss": 0.5961,
"step": 1718
},
{
"epoch": 1.5198938992042441,
"grad_norm": 0.25951486825942993,
"learning_rate": 1.3867307304042118e-05,
"loss": 0.6274,
"step": 1719
},
{
"epoch": 1.5207780725022104,
"grad_norm": 0.26584023237228394,
"learning_rate": 1.3860836603511595e-05,
"loss": 0.6247,
"step": 1720
},
{
"epoch": 1.5216622458001767,
"grad_norm": 0.2652737498283386,
"learning_rate": 1.3854364002721312e-05,
"loss": 0.6375,
"step": 1721
},
{
"epoch": 1.5225464190981433,
"grad_norm": 0.25442221760749817,
"learning_rate": 1.3847889504857015e-05,
"loss": 0.6308,
"step": 1722
},
{
"epoch": 1.5234305923961098,
"grad_norm": 0.25107505917549133,
"learning_rate": 1.3841413113105375e-05,
"loss": 0.5975,
"step": 1723
},
{
"epoch": 1.524314765694076,
"grad_norm": 0.26791006326675415,
"learning_rate": 1.3834934830653998e-05,
"loss": 0.6126,
"step": 1724
},
{
"epoch": 1.5251989389920424,
"grad_norm": 0.26788094639778137,
"learning_rate": 1.3828454660691424e-05,
"loss": 0.614,
"step": 1725
},
{
"epoch": 1.5260831122900087,
"grad_norm": 0.24498192965984344,
"learning_rate": 1.3821972606407112e-05,
"loss": 0.6114,
"step": 1726
},
{
"epoch": 1.5269672855879752,
"grad_norm": 0.2690882086753845,
"learning_rate": 1.3815488670991461e-05,
"loss": 0.6127,
"step": 1727
},
{
"epoch": 1.5278514588859418,
"grad_norm": 0.25311627984046936,
"learning_rate": 1.3809002857635792e-05,
"loss": 0.6385,
"step": 1728
},
{
"epoch": 1.528735632183908,
"grad_norm": 0.24897980690002441,
"learning_rate": 1.380251516953234e-05,
"loss": 0.6173,
"step": 1729
},
{
"epoch": 1.5296198054818744,
"grad_norm": 0.2697485387325287,
"learning_rate": 1.379602560987428e-05,
"loss": 0.6208,
"step": 1730
},
{
"epoch": 1.530503978779841,
"grad_norm": 0.24217809736728668,
"learning_rate": 1.3789534181855692e-05,
"loss": 0.6006,
"step": 1731
},
{
"epoch": 1.5313881520778072,
"grad_norm": 0.2534368932247162,
"learning_rate": 1.3783040888671585e-05,
"loss": 0.6085,
"step": 1732
},
{
"epoch": 1.5322723253757737,
"grad_norm": 0.254496693611145,
"learning_rate": 1.3776545733517887e-05,
"loss": 0.6114,
"step": 1733
},
{
"epoch": 1.53315649867374,
"grad_norm": 0.24456462264060974,
"learning_rate": 1.3770048719591438e-05,
"loss": 0.6186,
"step": 1734
},
{
"epoch": 1.5340406719717063,
"grad_norm": 0.25019681453704834,
"learning_rate": 1.3763549850089995e-05,
"loss": 0.5868,
"step": 1735
},
{
"epoch": 1.5349248452696729,
"grad_norm": 0.2426844835281372,
"learning_rate": 1.375704912821222e-05,
"loss": 0.6271,
"step": 1736
},
{
"epoch": 1.5358090185676394,
"grad_norm": 0.27387118339538574,
"learning_rate": 1.3750546557157707e-05,
"loss": 0.6274,
"step": 1737
},
{
"epoch": 1.5366931918656057,
"grad_norm": 0.2572766840457916,
"learning_rate": 1.3744042140126936e-05,
"loss": 0.6365,
"step": 1738
},
{
"epoch": 1.537577365163572,
"grad_norm": 0.27447035908699036,
"learning_rate": 1.3737535880321313e-05,
"loss": 0.6209,
"step": 1739
},
{
"epoch": 1.5384615384615383,
"grad_norm": 0.250455766916275,
"learning_rate": 1.3731027780943142e-05,
"loss": 0.6251,
"step": 1740
},
{
"epoch": 1.5393457117595049,
"grad_norm": 0.2528349459171295,
"learning_rate": 1.372451784519564e-05,
"loss": 0.596,
"step": 1741
},
{
"epoch": 1.5402298850574714,
"grad_norm": 0.23870112001895905,
"learning_rate": 1.3718006076282914e-05,
"loss": 0.5761,
"step": 1742
},
{
"epoch": 1.5411140583554377,
"grad_norm": 0.27276116609573364,
"learning_rate": 1.3711492477409994e-05,
"loss": 0.6242,
"step": 1743
},
{
"epoch": 1.541998231653404,
"grad_norm": 0.25377824902534485,
"learning_rate": 1.3704977051782789e-05,
"loss": 0.6312,
"step": 1744
},
{
"epoch": 1.5428824049513705,
"grad_norm": 0.25649645924568176,
"learning_rate": 1.3698459802608125e-05,
"loss": 0.6268,
"step": 1745
},
{
"epoch": 1.5437665782493368,
"grad_norm": 0.25238850712776184,
"learning_rate": 1.3691940733093712e-05,
"loss": 0.6126,
"step": 1746
},
{
"epoch": 1.5446507515473034,
"grad_norm": 0.25166580080986023,
"learning_rate": 1.368541984644816e-05,
"loss": 0.6435,
"step": 1747
},
{
"epoch": 1.5455349248452697,
"grad_norm": 0.25979354977607727,
"learning_rate": 1.3678897145880985e-05,
"loss": 0.6171,
"step": 1748
},
{
"epoch": 1.546419098143236,
"grad_norm": 0.241341233253479,
"learning_rate": 1.3672372634602578e-05,
"loss": 0.5943,
"step": 1749
},
{
"epoch": 1.5473032714412025,
"grad_norm": 0.24532607197761536,
"learning_rate": 1.366584631582423e-05,
"loss": 0.6126,
"step": 1750
},
{
"epoch": 1.548187444739169,
"grad_norm": 0.2570018172264099,
"learning_rate": 1.3659318192758121e-05,
"loss": 0.6024,
"step": 1751
},
{
"epoch": 1.5490716180371353,
"grad_norm": 0.24660183489322662,
"learning_rate": 1.365278826861732e-05,
"loss": 0.6055,
"step": 1752
},
{
"epoch": 1.5499557913351016,
"grad_norm": 0.24087151885032654,
"learning_rate": 1.3646256546615782e-05,
"loss": 0.6208,
"step": 1753
},
{
"epoch": 1.550839964633068,
"grad_norm": 0.2592099905014038,
"learning_rate": 1.3639723029968343e-05,
"loss": 0.6168,
"step": 1754
},
{
"epoch": 1.5517241379310345,
"grad_norm": 0.26235145330429077,
"learning_rate": 1.3633187721890725e-05,
"loss": 0.6296,
"step": 1755
},
{
"epoch": 1.552608311229001,
"grad_norm": 0.24391554296016693,
"learning_rate": 1.3626650625599539e-05,
"loss": 0.6317,
"step": 1756
},
{
"epoch": 1.5534924845269673,
"grad_norm": 0.25423863530158997,
"learning_rate": 1.3620111744312261e-05,
"loss": 0.5863,
"step": 1757
},
{
"epoch": 1.5543766578249336,
"grad_norm": 0.24249733984470367,
"learning_rate": 1.3613571081247258e-05,
"loss": 0.601,
"step": 1758
},
{
"epoch": 1.5552608311229,
"grad_norm": 0.24746358394622803,
"learning_rate": 1.360702863962377e-05,
"loss": 0.6266,
"step": 1759
},
{
"epoch": 1.5561450044208665,
"grad_norm": 0.24695654213428497,
"learning_rate": 1.3600484422661908e-05,
"loss": 0.6206,
"step": 1760
},
{
"epoch": 1.557029177718833,
"grad_norm": 0.25013625621795654,
"learning_rate": 1.3593938433582667e-05,
"loss": 0.5858,
"step": 1761
},
{
"epoch": 1.5579133510167993,
"grad_norm": 0.2494516670703888,
"learning_rate": 1.3587390675607903e-05,
"loss": 0.6004,
"step": 1762
},
{
"epoch": 1.5587975243147656,
"grad_norm": 0.2742985188961029,
"learning_rate": 1.3580841151960353e-05,
"loss": 0.6119,
"step": 1763
},
{
"epoch": 1.5596816976127321,
"grad_norm": 0.2550061345100403,
"learning_rate": 1.3574289865863613e-05,
"loss": 0.6173,
"step": 1764
},
{
"epoch": 1.5605658709106986,
"grad_norm": 0.2518077492713928,
"learning_rate": 1.3567736820542155e-05,
"loss": 0.6122,
"step": 1765
},
{
"epoch": 1.561450044208665,
"grad_norm": 0.2619858980178833,
"learning_rate": 1.3561182019221311e-05,
"loss": 0.6182,
"step": 1766
},
{
"epoch": 1.5623342175066313,
"grad_norm": 0.26645082235336304,
"learning_rate": 1.3554625465127278e-05,
"loss": 0.615,
"step": 1767
},
{
"epoch": 1.5632183908045976,
"grad_norm": 0.24572229385375977,
"learning_rate": 1.3548067161487119e-05,
"loss": 0.5885,
"step": 1768
},
{
"epoch": 1.564102564102564,
"grad_norm": 0.2774527668952942,
"learning_rate": 1.3541507111528759e-05,
"loss": 0.6377,
"step": 1769
},
{
"epoch": 1.5649867374005306,
"grad_norm": 0.26037681102752686,
"learning_rate": 1.3534945318480976e-05,
"loss": 0.5814,
"step": 1770
},
{
"epoch": 1.565870910698497,
"grad_norm": 0.25602561235427856,
"learning_rate": 1.352838178557341e-05,
"loss": 0.6211,
"step": 1771
},
{
"epoch": 1.5667550839964632,
"grad_norm": 0.27241069078445435,
"learning_rate": 1.352181651603656e-05,
"loss": 0.622,
"step": 1772
},
{
"epoch": 1.5676392572944295,
"grad_norm": 0.2554008364677429,
"learning_rate": 1.3515249513101773e-05,
"loss": 0.642,
"step": 1773
},
{
"epoch": 1.568523430592396,
"grad_norm": 0.2532515227794647,
"learning_rate": 1.3508680780001257e-05,
"loss": 0.6035,
"step": 1774
},
{
"epoch": 1.5694076038903626,
"grad_norm": 0.2517213523387909,
"learning_rate": 1.3502110319968064e-05,
"loss": 0.6282,
"step": 1775
},
{
"epoch": 1.570291777188329,
"grad_norm": 0.2502802014350891,
"learning_rate": 1.3495538136236099e-05,
"loss": 0.6241,
"step": 1776
},
{
"epoch": 1.5711759504862952,
"grad_norm": 0.26083990931510925,
"learning_rate": 1.3488964232040123e-05,
"loss": 0.6145,
"step": 1777
},
{
"epoch": 1.5720601237842617,
"grad_norm": 0.25038981437683105,
"learning_rate": 1.3482388610615727e-05,
"loss": 0.6175,
"step": 1778
},
{
"epoch": 1.5729442970822283,
"grad_norm": 0.27218812704086304,
"learning_rate": 1.347581127519936e-05,
"loss": 0.649,
"step": 1779
},
{
"epoch": 1.5738284703801946,
"grad_norm": 0.2551039755344391,
"learning_rate": 1.3469232229028318e-05,
"loss": 0.6293,
"step": 1780
},
{
"epoch": 1.5747126436781609,
"grad_norm": 0.25399115681648254,
"learning_rate": 1.3462651475340724e-05,
"loss": 0.6282,
"step": 1781
},
{
"epoch": 1.5755968169761272,
"grad_norm": 0.27293142676353455,
"learning_rate": 1.3456069017375551e-05,
"loss": 0.6197,
"step": 1782
},
{
"epoch": 1.5764809902740937,
"grad_norm": 0.2623535096645355,
"learning_rate": 1.3449484858372615e-05,
"loss": 0.6247,
"step": 1783
},
{
"epoch": 1.5773651635720602,
"grad_norm": 0.2431313693523407,
"learning_rate": 1.3442899001572563e-05,
"loss": 0.6178,
"step": 1784
},
{
"epoch": 1.5782493368700266,
"grad_norm": 0.2483963966369629,
"learning_rate": 1.3436311450216872e-05,
"loss": 0.6139,
"step": 1785
},
{
"epoch": 1.5791335101679929,
"grad_norm": 0.2714947760105133,
"learning_rate": 1.3429722207547868e-05,
"loss": 0.6074,
"step": 1786
},
{
"epoch": 1.5800176834659592,
"grad_norm": 0.2673088610172272,
"learning_rate": 1.3423131276808697e-05,
"loss": 0.6365,
"step": 1787
},
{
"epoch": 1.5809018567639257,
"grad_norm": 0.25386568903923035,
"learning_rate": 1.341653866124334e-05,
"loss": 0.5923,
"step": 1788
},
{
"epoch": 1.5817860300618922,
"grad_norm": 0.24313196539878845,
"learning_rate": 1.3409944364096605e-05,
"loss": 0.5963,
"step": 1789
},
{
"epoch": 1.5826702033598585,
"grad_norm": 0.25083985924720764,
"learning_rate": 1.3403348388614138e-05,
"loss": 0.6062,
"step": 1790
},
{
"epoch": 1.5835543766578248,
"grad_norm": 0.28995099663734436,
"learning_rate": 1.3396750738042392e-05,
"loss": 0.6521,
"step": 1791
},
{
"epoch": 1.5844385499557914,
"grad_norm": 0.2613549530506134,
"learning_rate": 1.3390151415628663e-05,
"loss": 0.6109,
"step": 1792
},
{
"epoch": 1.585322723253758,
"grad_norm": 0.25872790813446045,
"learning_rate": 1.338355042462106e-05,
"loss": 0.6067,
"step": 1793
},
{
"epoch": 1.5862068965517242,
"grad_norm": 0.2530718743801117,
"learning_rate": 1.3376947768268518e-05,
"loss": 0.6224,
"step": 1794
},
{
"epoch": 1.5870910698496905,
"grad_norm": 0.25947898626327515,
"learning_rate": 1.3370343449820784e-05,
"loss": 0.6106,
"step": 1795
},
{
"epoch": 1.5879752431476568,
"grad_norm": 0.24885700643062592,
"learning_rate": 1.336373747252843e-05,
"loss": 0.6062,
"step": 1796
},
{
"epoch": 1.5888594164456233,
"grad_norm": 0.27202144265174866,
"learning_rate": 1.3357129839642846e-05,
"loss": 0.6148,
"step": 1797
},
{
"epoch": 1.5897435897435899,
"grad_norm": 0.2486962229013443,
"learning_rate": 1.3350520554416229e-05,
"loss": 0.5938,
"step": 1798
},
{
"epoch": 1.5906277630415562,
"grad_norm": 0.26366081833839417,
"learning_rate": 1.3343909620101598e-05,
"loss": 0.5958,
"step": 1799
},
{
"epoch": 1.5915119363395225,
"grad_norm": 0.25693845748901367,
"learning_rate": 1.3337297039952778e-05,
"loss": 0.6239,
"step": 1800
},
{
"epoch": 1.5923961096374888,
"grad_norm": 0.26489517092704773,
"learning_rate": 1.3330682817224403e-05,
"loss": 0.5924,
"step": 1801
},
{
"epoch": 1.5932802829354553,
"grad_norm": 0.24123670160770416,
"learning_rate": 1.3324066955171921e-05,
"loss": 0.6144,
"step": 1802
},
{
"epoch": 1.5941644562334218,
"grad_norm": 0.2794577181339264,
"learning_rate": 1.3317449457051585e-05,
"loss": 0.6204,
"step": 1803
},
{
"epoch": 1.5950486295313882,
"grad_norm": 0.2630338668823242,
"learning_rate": 1.3310830326120448e-05,
"loss": 0.5938,
"step": 1804
},
{
"epoch": 1.5959328028293545,
"grad_norm": 0.24684180319309235,
"learning_rate": 1.3304209565636373e-05,
"loss": 0.6165,
"step": 1805
},
{
"epoch": 1.596816976127321,
"grad_norm": 0.25309330224990845,
"learning_rate": 1.3297587178858023e-05,
"loss": 0.6182,
"step": 1806
},
{
"epoch": 1.5977011494252875,
"grad_norm": 0.25391507148742676,
"learning_rate": 1.3290963169044861e-05,
"loss": 0.6199,
"step": 1807
},
{
"epoch": 1.5985853227232538,
"grad_norm": 0.24555794894695282,
"learning_rate": 1.3284337539457151e-05,
"loss": 0.6235,
"step": 1808
},
{
"epoch": 1.5994694960212201,
"grad_norm": 0.24818289279937744,
"learning_rate": 1.3277710293355945e-05,
"loss": 0.6058,
"step": 1809
},
{
"epoch": 1.6003536693191864,
"grad_norm": 0.2772948741912842,
"learning_rate": 1.327108143400311e-05,
"loss": 0.6113,
"step": 1810
},
{
"epoch": 1.601237842617153,
"grad_norm": 0.2601780295372009,
"learning_rate": 1.3264450964661286e-05,
"loss": 0.6175,
"step": 1811
},
{
"epoch": 1.6021220159151195,
"grad_norm": 0.26507171988487244,
"learning_rate": 1.3257818888593916e-05,
"loss": 0.603,
"step": 1812
},
{
"epoch": 1.6030061892130858,
"grad_norm": 0.25596025586128235,
"learning_rate": 1.3251185209065236e-05,
"loss": 0.6071,
"step": 1813
},
{
"epoch": 1.603890362511052,
"grad_norm": 0.267049640417099,
"learning_rate": 1.3244549929340261e-05,
"loss": 0.6337,
"step": 1814
},
{
"epoch": 1.6047745358090184,
"grad_norm": 0.2585289180278778,
"learning_rate": 1.3237913052684804e-05,
"loss": 0.6199,
"step": 1815
},
{
"epoch": 1.605658709106985,
"grad_norm": 0.2737845182418823,
"learning_rate": 1.323127458236546e-05,
"loss": 0.6,
"step": 1816
},
{
"epoch": 1.6065428824049515,
"grad_norm": 0.26172712445259094,
"learning_rate": 1.3224634521649608e-05,
"loss": 0.5904,
"step": 1817
},
{
"epoch": 1.6074270557029178,
"grad_norm": 0.26924028992652893,
"learning_rate": 1.321799287380541e-05,
"loss": 0.6107,
"step": 1818
},
{
"epoch": 1.608311229000884,
"grad_norm": 0.25351423025131226,
"learning_rate": 1.321134964210181e-05,
"loss": 0.6031,
"step": 1819
},
{
"epoch": 1.6091954022988506,
"grad_norm": 0.2577957510948181,
"learning_rate": 1.320470482980853e-05,
"loss": 0.588,
"step": 1820
},
{
"epoch": 1.6100795755968171,
"grad_norm": 0.26303696632385254,
"learning_rate": 1.3198058440196074e-05,
"loss": 0.6305,
"step": 1821
},
{
"epoch": 1.6109637488947834,
"grad_norm": 0.24215184152126312,
"learning_rate": 1.3191410476535713e-05,
"loss": 0.6017,
"step": 1822
},
{
"epoch": 1.6118479221927497,
"grad_norm": 0.25478053092956543,
"learning_rate": 1.3184760942099504e-05,
"loss": 0.6108,
"step": 1823
},
{
"epoch": 1.612732095490716,
"grad_norm": 0.25570589303970337,
"learning_rate": 1.3178109840160269e-05,
"loss": 0.6153,
"step": 1824
},
{
"epoch": 1.6136162687886826,
"grad_norm": 0.25086236000061035,
"learning_rate": 1.3171457173991604e-05,
"loss": 0.5895,
"step": 1825
},
{
"epoch": 1.6145004420866491,
"grad_norm": 0.2633937895298004,
"learning_rate": 1.3164802946867883e-05,
"loss": 0.6339,
"step": 1826
},
{
"epoch": 1.6153846153846154,
"grad_norm": 0.26924607157707214,
"learning_rate": 1.315814716206423e-05,
"loss": 0.6432,
"step": 1827
},
{
"epoch": 1.6162687886825817,
"grad_norm": 0.26300549507141113,
"learning_rate": 1.3151489822856553e-05,
"loss": 0.6452,
"step": 1828
},
{
"epoch": 1.617152961980548,
"grad_norm": 0.26272982358932495,
"learning_rate": 1.314483093252152e-05,
"loss": 0.6377,
"step": 1829
},
{
"epoch": 1.6180371352785146,
"grad_norm": 0.2636886537075043,
"learning_rate": 1.3138170494336553e-05,
"loss": 0.6252,
"step": 1830
},
{
"epoch": 1.618921308576481,
"grad_norm": 0.2522881031036377,
"learning_rate": 1.3131508511579851e-05,
"loss": 0.6387,
"step": 1831
},
{
"epoch": 1.6198054818744474,
"grad_norm": 0.25250720977783203,
"learning_rate": 1.3124844987530364e-05,
"loss": 0.6165,
"step": 1832
},
{
"epoch": 1.6206896551724137,
"grad_norm": 0.2515047490596771,
"learning_rate": 1.3118179925467802e-05,
"loss": 0.6185,
"step": 1833
},
{
"epoch": 1.6215738284703802,
"grad_norm": 0.2588637173175812,
"learning_rate": 1.311151332867263e-05,
"loss": 0.6133,
"step": 1834
},
{
"epoch": 1.6224580017683468,
"grad_norm": 0.2522568106651306,
"learning_rate": 1.3104845200426074e-05,
"loss": 0.6301,
"step": 1835
},
{
"epoch": 1.623342175066313,
"grad_norm": 0.2531670928001404,
"learning_rate": 1.3098175544010115e-05,
"loss": 0.6396,
"step": 1836
},
{
"epoch": 1.6242263483642794,
"grad_norm": 0.2564160227775574,
"learning_rate": 1.3091504362707473e-05,
"loss": 0.6378,
"step": 1837
},
{
"epoch": 1.6251105216622457,
"grad_norm": 0.25758257508277893,
"learning_rate": 1.308483165980163e-05,
"loss": 0.6202,
"step": 1838
},
{
"epoch": 1.6259946949602122,
"grad_norm": 0.26820486783981323,
"learning_rate": 1.307815743857682e-05,
"loss": 0.6252,
"step": 1839
},
{
"epoch": 1.6268788682581787,
"grad_norm": 0.2564481198787689,
"learning_rate": 1.3071481702318013e-05,
"loss": 0.6093,
"step": 1840
},
{
"epoch": 1.627763041556145,
"grad_norm": 0.25531822443008423,
"learning_rate": 1.306480445431093e-05,
"loss": 0.6049,
"step": 1841
},
{
"epoch": 1.6286472148541113,
"grad_norm": 0.25176653265953064,
"learning_rate": 1.3058125697842039e-05,
"loss": 0.6209,
"step": 1842
},
{
"epoch": 1.6295313881520777,
"grad_norm": 0.25662490725517273,
"learning_rate": 1.3051445436198549e-05,
"loss": 0.6159,
"step": 1843
},
{
"epoch": 1.6304155614500442,
"grad_norm": 0.2448452115058899,
"learning_rate": 1.3044763672668404e-05,
"loss": 0.6156,
"step": 1844
},
{
"epoch": 1.6312997347480107,
"grad_norm": 0.25976210832595825,
"learning_rate": 1.3038080410540293e-05,
"loss": 0.6337,
"step": 1845
},
{
"epoch": 1.632183908045977,
"grad_norm": 0.2698408365249634,
"learning_rate": 1.3031395653103644e-05,
"loss": 0.6246,
"step": 1846
},
{
"epoch": 1.6330680813439433,
"grad_norm": 0.24739041924476624,
"learning_rate": 1.3024709403648617e-05,
"loss": 0.6176,
"step": 1847
},
{
"epoch": 1.6339522546419099,
"grad_norm": 0.25738534331321716,
"learning_rate": 1.3018021665466108e-05,
"loss": 0.6104,
"step": 1848
},
{
"epoch": 1.6348364279398764,
"grad_norm": 0.2510923147201538,
"learning_rate": 1.3011332441847743e-05,
"loss": 0.6022,
"step": 1849
},
{
"epoch": 1.6357206012378427,
"grad_norm": 0.249772310256958,
"learning_rate": 1.3004641736085888e-05,
"loss": 0.6366,
"step": 1850
},
{
"epoch": 1.636604774535809,
"grad_norm": 0.2612029016017914,
"learning_rate": 1.2997949551473625e-05,
"loss": 0.6015,
"step": 1851
},
{
"epoch": 1.6374889478337753,
"grad_norm": 0.2479582279920578,
"learning_rate": 1.2991255891304775e-05,
"loss": 0.6092,
"step": 1852
},
{
"epoch": 1.6383731211317418,
"grad_norm": 0.26224711537361145,
"learning_rate": 1.2984560758873882e-05,
"loss": 0.6582,
"step": 1853
},
{
"epoch": 1.6392572944297084,
"grad_norm": 0.2578248381614685,
"learning_rate": 1.2977864157476208e-05,
"loss": 0.6259,
"step": 1854
},
{
"epoch": 1.6401414677276747,
"grad_norm": 0.2551095485687256,
"learning_rate": 1.2971166090407749e-05,
"loss": 0.6153,
"step": 1855
},
{
"epoch": 1.641025641025641,
"grad_norm": 0.2525712847709656,
"learning_rate": 1.2964466560965216e-05,
"loss": 0.6137,
"step": 1856
},
{
"epoch": 1.6419098143236073,
"grad_norm": 0.25378960371017456,
"learning_rate": 1.2957765572446044e-05,
"loss": 0.6331,
"step": 1857
},
{
"epoch": 1.6427939876215738,
"grad_norm": 0.2599256634712219,
"learning_rate": 1.2951063128148378e-05,
"loss": 0.6286,
"step": 1858
},
{
"epoch": 1.6436781609195403,
"grad_norm": 0.24872106313705444,
"learning_rate": 1.2944359231371087e-05,
"loss": 0.6138,
"step": 1859
},
{
"epoch": 1.6445623342175066,
"grad_norm": 0.24780066311359406,
"learning_rate": 1.2937653885413753e-05,
"loss": 0.6388,
"step": 1860
},
{
"epoch": 1.645446507515473,
"grad_norm": 0.23609694838523865,
"learning_rate": 1.2930947093576668e-05,
"loss": 0.5868,
"step": 1861
},
{
"epoch": 1.6463306808134395,
"grad_norm": 0.2590028941631317,
"learning_rate": 1.2924238859160844e-05,
"loss": 0.6079,
"step": 1862
},
{
"epoch": 1.647214854111406,
"grad_norm": 0.24744683504104614,
"learning_rate": 1.2917529185467992e-05,
"loss": 0.6145,
"step": 1863
},
{
"epoch": 1.6480990274093723,
"grad_norm": 0.24845601618289948,
"learning_rate": 1.2910818075800539e-05,
"loss": 0.6137,
"step": 1864
},
{
"epoch": 1.6489832007073386,
"grad_norm": 0.2667432129383087,
"learning_rate": 1.2904105533461619e-05,
"loss": 0.6226,
"step": 1865
},
{
"epoch": 1.649867374005305,
"grad_norm": 0.24880369007587433,
"learning_rate": 1.2897391561755064e-05,
"loss": 0.6164,
"step": 1866
},
{
"epoch": 1.6507515473032714,
"grad_norm": 0.252419650554657,
"learning_rate": 1.2890676163985419e-05,
"loss": 0.5988,
"step": 1867
},
{
"epoch": 1.651635720601238,
"grad_norm": 0.2524411678314209,
"learning_rate": 1.2883959343457921e-05,
"loss": 0.6094,
"step": 1868
},
{
"epoch": 1.6525198938992043,
"grad_norm": 0.25243043899536133,
"learning_rate": 1.2877241103478522e-05,
"loss": 0.6112,
"step": 1869
},
{
"epoch": 1.6534040671971706,
"grad_norm": 0.2594641149044037,
"learning_rate": 1.287052144735385e-05,
"loss": 0.6252,
"step": 1870
},
{
"epoch": 1.654288240495137,
"grad_norm": 0.26543566584587097,
"learning_rate": 1.2863800378391253e-05,
"loss": 0.6202,
"step": 1871
},
{
"epoch": 1.6551724137931034,
"grad_norm": 0.24080465734004974,
"learning_rate": 1.2857077899898762e-05,
"loss": 0.6192,
"step": 1872
},
{
"epoch": 1.65605658709107,
"grad_norm": 0.26254191994667053,
"learning_rate": 1.2850354015185103e-05,
"loss": 0.6229,
"step": 1873
},
{
"epoch": 1.6569407603890363,
"grad_norm": 0.25726932287216187,
"learning_rate": 1.2843628727559694e-05,
"loss": 0.6308,
"step": 1874
},
{
"epoch": 1.6578249336870026,
"grad_norm": 0.2564818859100342,
"learning_rate": 1.283690204033265e-05,
"loss": 0.6148,
"step": 1875
},
{
"epoch": 1.658709106984969,
"grad_norm": 0.26183632016181946,
"learning_rate": 1.2830173956814764e-05,
"loss": 0.6251,
"step": 1876
},
{
"epoch": 1.6595932802829354,
"grad_norm": 0.24994239211082458,
"learning_rate": 1.2823444480317526e-05,
"loss": 0.5999,
"step": 1877
},
{
"epoch": 1.660477453580902,
"grad_norm": 0.26404857635498047,
"learning_rate": 1.2816713614153106e-05,
"loss": 0.6082,
"step": 1878
},
{
"epoch": 1.6613616268788682,
"grad_norm": 0.2531849145889282,
"learning_rate": 1.280998136163436e-05,
"loss": 0.6468,
"step": 1879
},
{
"epoch": 1.6622458001768345,
"grad_norm": 0.24235683679580688,
"learning_rate": 1.2803247726074828e-05,
"loss": 0.5962,
"step": 1880
},
{
"epoch": 1.663129973474801,
"grad_norm": 0.2451009601354599,
"learning_rate": 1.2796512710788722e-05,
"loss": 0.5845,
"step": 1881
},
{
"epoch": 1.6640141467727676,
"grad_norm": 0.26735374331474304,
"learning_rate": 1.278977631909095e-05,
"loss": 0.6522,
"step": 1882
},
{
"epoch": 1.664898320070734,
"grad_norm": 0.2563265264034271,
"learning_rate": 1.2783038554297077e-05,
"loss": 0.612,
"step": 1883
},
{
"epoch": 1.6657824933687002,
"grad_norm": 0.26463302969932556,
"learning_rate": 1.2776299419723359e-05,
"loss": 0.6159,
"step": 1884
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.2547277510166168,
"learning_rate": 1.276955891868672e-05,
"loss": 0.6236,
"step": 1885
},
{
"epoch": 1.667550839964633,
"grad_norm": 0.2560112178325653,
"learning_rate": 1.2762817054504762e-05,
"loss": 0.6214,
"step": 1886
},
{
"epoch": 1.6684350132625996,
"grad_norm": 0.24871279299259186,
"learning_rate": 1.2756073830495748e-05,
"loss": 0.5878,
"step": 1887
},
{
"epoch": 1.6693191865605659,
"grad_norm": 0.2565147280693054,
"learning_rate": 1.2749329249978618e-05,
"loss": 0.6178,
"step": 1888
},
{
"epoch": 1.6702033598585322,
"grad_norm": 0.254533052444458,
"learning_rate": 1.2742583316272976e-05,
"loss": 0.6214,
"step": 1889
},
{
"epoch": 1.6710875331564987,
"grad_norm": 0.2608291804790497,
"learning_rate": 1.2735836032699093e-05,
"loss": 0.593,
"step": 1890
},
{
"epoch": 1.671971706454465,
"grad_norm": 0.2565882205963135,
"learning_rate": 1.2729087402577913e-05,
"loss": 0.6017,
"step": 1891
},
{
"epoch": 1.6728558797524316,
"grad_norm": 0.2437106817960739,
"learning_rate": 1.2722337429231027e-05,
"loss": 0.5928,
"step": 1892
},
{
"epoch": 1.6737400530503979,
"grad_norm": 0.2529923915863037,
"learning_rate": 1.2715586115980699e-05,
"loss": 0.6299,
"step": 1893
},
{
"epoch": 1.6746242263483642,
"grad_norm": 0.24945084750652313,
"learning_rate": 1.2708833466149844e-05,
"loss": 0.5993,
"step": 1894
},
{
"epoch": 1.6755083996463307,
"grad_norm": 0.25257444381713867,
"learning_rate": 1.2702079483062042e-05,
"loss": 0.6102,
"step": 1895
},
{
"epoch": 1.6763925729442972,
"grad_norm": 0.25613993406295776,
"learning_rate": 1.2695324170041528e-05,
"loss": 0.634,
"step": 1896
},
{
"epoch": 1.6772767462422635,
"grad_norm": 0.2612609565258026,
"learning_rate": 1.2688567530413182e-05,
"loss": 0.6235,
"step": 1897
},
{
"epoch": 1.6781609195402298,
"grad_norm": 0.26202070713043213,
"learning_rate": 1.2681809567502562e-05,
"loss": 0.624,
"step": 1898
},
{
"epoch": 1.6790450928381961,
"grad_norm": 0.25812482833862305,
"learning_rate": 1.2675050284635838e-05,
"loss": 0.6218,
"step": 1899
},
{
"epoch": 1.6799292661361627,
"grad_norm": 0.2613903880119324,
"learning_rate": 1.2668289685139872e-05,
"loss": 0.6205,
"step": 1900
},
{
"epoch": 1.6808134394341292,
"grad_norm": 0.25094321370124817,
"learning_rate": 1.2661527772342144e-05,
"loss": 0.6034,
"step": 1901
},
{
"epoch": 1.6816976127320955,
"grad_norm": 0.2618274390697479,
"learning_rate": 1.2654764549570788e-05,
"loss": 0.6371,
"step": 1902
},
{
"epoch": 1.6825817860300618,
"grad_norm": 0.24622586369514465,
"learning_rate": 1.2648000020154591e-05,
"loss": 0.5916,
"step": 1903
},
{
"epoch": 1.6834659593280283,
"grad_norm": 0.2403654307126999,
"learning_rate": 1.2641234187422975e-05,
"loss": 0.5983,
"step": 1904
},
{
"epoch": 1.6843501326259946,
"grad_norm": 0.25834015011787415,
"learning_rate": 1.2634467054706008e-05,
"loss": 0.6383,
"step": 1905
},
{
"epoch": 1.6852343059239612,
"grad_norm": 0.24597378075122833,
"learning_rate": 1.2627698625334393e-05,
"loss": 0.6137,
"step": 1906
},
{
"epoch": 1.6861184792219275,
"grad_norm": 0.2524603009223938,
"learning_rate": 1.2620928902639474e-05,
"loss": 0.5986,
"step": 1907
},
{
"epoch": 1.6870026525198938,
"grad_norm": 0.25663885474205017,
"learning_rate": 1.2614157889953231e-05,
"loss": 0.6165,
"step": 1908
},
{
"epoch": 1.6878868258178603,
"grad_norm": 0.25844958424568176,
"learning_rate": 1.2607385590608281e-05,
"loss": 0.6105,
"step": 1909
},
{
"epoch": 1.6887709991158268,
"grad_norm": 0.2581733465194702,
"learning_rate": 1.260061200793787e-05,
"loss": 0.6248,
"step": 1910
},
{
"epoch": 1.6896551724137931,
"grad_norm": 0.26633158326148987,
"learning_rate": 1.2593837145275882e-05,
"loss": 0.6263,
"step": 1911
},
{
"epoch": 1.6905393457117595,
"grad_norm": 0.2782621383666992,
"learning_rate": 1.2587061005956822e-05,
"loss": 0.6165,
"step": 1912
},
{
"epoch": 1.6914235190097258,
"grad_norm": 0.2550716996192932,
"learning_rate": 1.2580283593315831e-05,
"loss": 0.6442,
"step": 1913
},
{
"epoch": 1.6923076923076923,
"grad_norm": 0.2605842053890228,
"learning_rate": 1.2573504910688676e-05,
"loss": 0.641,
"step": 1914
},
{
"epoch": 1.6931918656056588,
"grad_norm": 0.26414069533348083,
"learning_rate": 1.2566724961411747e-05,
"loss": 0.6062,
"step": 1915
},
{
"epoch": 1.6940760389036251,
"grad_norm": 0.24569210410118103,
"learning_rate": 1.2559943748822054e-05,
"loss": 0.6068,
"step": 1916
},
{
"epoch": 1.6949602122015914,
"grad_norm": 0.2547416090965271,
"learning_rate": 1.2553161276257235e-05,
"loss": 0.613,
"step": 1917
},
{
"epoch": 1.695844385499558,
"grad_norm": 0.2757764756679535,
"learning_rate": 1.2546377547055547e-05,
"loss": 0.6045,
"step": 1918
},
{
"epoch": 1.6967285587975243,
"grad_norm": 0.25981855392456055,
"learning_rate": 1.2539592564555863e-05,
"loss": 0.623,
"step": 1919
},
{
"epoch": 1.6976127320954908,
"grad_norm": 0.27036339044570923,
"learning_rate": 1.2532806332097674e-05,
"loss": 0.6104,
"step": 1920
},
{
"epoch": 1.698496905393457,
"grad_norm": 0.2512921988964081,
"learning_rate": 1.2526018853021085e-05,
"loss": 0.6058,
"step": 1921
},
{
"epoch": 1.6993810786914234,
"grad_norm": 0.24836361408233643,
"learning_rate": 1.251923013066682e-05,
"loss": 0.5878,
"step": 1922
},
{
"epoch": 1.70026525198939,
"grad_norm": 0.23754893243312836,
"learning_rate": 1.2512440168376206e-05,
"loss": 0.5907,
"step": 1923
},
{
"epoch": 1.7011494252873565,
"grad_norm": 0.24704672396183014,
"learning_rate": 1.2505648969491184e-05,
"loss": 0.6087,
"step": 1924
},
{
"epoch": 1.7020335985853228,
"grad_norm": 0.2409648448228836,
"learning_rate": 1.2498856537354311e-05,
"loss": 0.5949,
"step": 1925
},
{
"epoch": 1.702917771883289,
"grad_norm": 0.2556937634944916,
"learning_rate": 1.2492062875308736e-05,
"loss": 0.6312,
"step": 1926
},
{
"epoch": 1.7038019451812554,
"grad_norm": 0.24228796362876892,
"learning_rate": 1.2485267986698233e-05,
"loss": 0.576,
"step": 1927
},
{
"epoch": 1.704686118479222,
"grad_norm": 0.2590543031692505,
"learning_rate": 1.247847187486716e-05,
"loss": 0.6127,
"step": 1928
},
{
"epoch": 1.7055702917771884,
"grad_norm": 0.24657118320465088,
"learning_rate": 1.247167454316049e-05,
"loss": 0.6002,
"step": 1929
},
{
"epoch": 1.7064544650751547,
"grad_norm": 0.24121209979057312,
"learning_rate": 1.2464875994923787e-05,
"loss": 0.6007,
"step": 1930
},
{
"epoch": 1.707338638373121,
"grad_norm": 0.24531449377536774,
"learning_rate": 1.2458076233503227e-05,
"loss": 0.613,
"step": 1931
},
{
"epoch": 1.7082228116710876,
"grad_norm": 0.24562785029411316,
"learning_rate": 1.245127526224557e-05,
"loss": 0.616,
"step": 1932
},
{
"epoch": 1.7091069849690539,
"grad_norm": 0.2482648491859436,
"learning_rate": 1.2444473084498174e-05,
"loss": 0.6048,
"step": 1933
},
{
"epoch": 1.7099911582670204,
"grad_norm": 0.2504918575286865,
"learning_rate": 1.2437669703609002e-05,
"loss": 0.6187,
"step": 1934
},
{
"epoch": 1.7108753315649867,
"grad_norm": 0.25419121980667114,
"learning_rate": 1.2430865122926587e-05,
"loss": 0.6131,
"step": 1935
},
{
"epoch": 1.711759504862953,
"grad_norm": 0.25046634674072266,
"learning_rate": 1.2424059345800079e-05,
"loss": 0.6103,
"step": 1936
},
{
"epoch": 1.7126436781609196,
"grad_norm": 0.25423553586006165,
"learning_rate": 1.24172523755792e-05,
"loss": 0.6066,
"step": 1937
},
{
"epoch": 1.713527851458886,
"grad_norm": 0.247236967086792,
"learning_rate": 1.241044421561426e-05,
"loss": 0.6203,
"step": 1938
},
{
"epoch": 1.7144120247568524,
"grad_norm": 0.24983461201190948,
"learning_rate": 1.2403634869256158e-05,
"loss": 0.6363,
"step": 1939
},
{
"epoch": 1.7152961980548187,
"grad_norm": 0.2523598074913025,
"learning_rate": 1.2396824339856382e-05,
"loss": 0.6115,
"step": 1940
},
{
"epoch": 1.716180371352785,
"grad_norm": 0.24501970410346985,
"learning_rate": 1.2390012630766992e-05,
"loss": 0.5994,
"step": 1941
},
{
"epoch": 1.7170645446507515,
"grad_norm": 0.2428794950246811,
"learning_rate": 1.2383199745340636e-05,
"loss": 0.6108,
"step": 1942
},
{
"epoch": 1.717948717948718,
"grad_norm": 0.25641992688179016,
"learning_rate": 1.2376385686930537e-05,
"loss": 0.6508,
"step": 1943
},
{
"epoch": 1.7188328912466844,
"grad_norm": 0.2611239552497864,
"learning_rate": 1.2369570458890495e-05,
"loss": 0.642,
"step": 1944
},
{
"epoch": 1.7197170645446507,
"grad_norm": 0.23916538059711456,
"learning_rate": 1.236275406457489e-05,
"loss": 0.6055,
"step": 1945
},
{
"epoch": 1.7206012378426172,
"grad_norm": 0.2416297346353531,
"learning_rate": 1.2355936507338674e-05,
"loss": 0.6163,
"step": 1946
},
{
"epoch": 1.7214854111405835,
"grad_norm": 0.24319541454315186,
"learning_rate": 1.2349117790537374e-05,
"loss": 0.6126,
"step": 1947
},
{
"epoch": 1.72236958443855,
"grad_norm": 0.23112298548221588,
"learning_rate": 1.2342297917527075e-05,
"loss": 0.5902,
"step": 1948
},
{
"epoch": 1.7232537577365163,
"grad_norm": 0.24412156641483307,
"learning_rate": 1.233547689166445e-05,
"loss": 0.6172,
"step": 1949
},
{
"epoch": 1.7241379310344827,
"grad_norm": 0.24142323434352875,
"learning_rate": 1.2328654716306727e-05,
"loss": 0.6152,
"step": 1950
},
{
"epoch": 1.7250221043324492,
"grad_norm": 0.24941980838775635,
"learning_rate": 1.2321831394811704e-05,
"loss": 0.6312,
"step": 1951
},
{
"epoch": 1.7259062776304157,
"grad_norm": 0.2508319020271301,
"learning_rate": 1.2315006930537739e-05,
"loss": 0.5988,
"step": 1952
},
{
"epoch": 1.726790450928382,
"grad_norm": 0.2452768087387085,
"learning_rate": 1.2308181326843756e-05,
"loss": 0.6155,
"step": 1953
},
{
"epoch": 1.7276746242263483,
"grad_norm": 0.2500527799129486,
"learning_rate": 1.2301354587089245e-05,
"loss": 0.6214,
"step": 1954
},
{
"epoch": 1.7285587975243146,
"grad_norm": 0.23793424665927887,
"learning_rate": 1.229452671463424e-05,
"loss": 0.5924,
"step": 1955
},
{
"epoch": 1.7294429708222812,
"grad_norm": 0.24913889169692993,
"learning_rate": 1.228769771283935e-05,
"loss": 0.5868,
"step": 1956
},
{
"epoch": 1.7303271441202477,
"grad_norm": 0.2588574290275574,
"learning_rate": 1.2280867585065727e-05,
"loss": 0.6383,
"step": 1957
},
{
"epoch": 1.731211317418214,
"grad_norm": 0.2564661204814911,
"learning_rate": 1.2274036334675087e-05,
"loss": 0.6284,
"step": 1958
},
{
"epoch": 1.7320954907161803,
"grad_norm": 0.24482408165931702,
"learning_rate": 1.226720396502969e-05,
"loss": 0.6047,
"step": 1959
},
{
"epoch": 1.7329796640141468,
"grad_norm": 0.24826249480247498,
"learning_rate": 1.226037047949235e-05,
"loss": 0.6225,
"step": 1960
},
{
"epoch": 1.7338638373121131,
"grad_norm": 0.24558793008327484,
"learning_rate": 1.225353588142643e-05,
"loss": 0.5953,
"step": 1961
},
{
"epoch": 1.7347480106100797,
"grad_norm": 0.2439793348312378,
"learning_rate": 1.2246700174195845e-05,
"loss": 0.6224,
"step": 1962
},
{
"epoch": 1.735632183908046,
"grad_norm": 0.2518351376056671,
"learning_rate": 1.2239863361165047e-05,
"loss": 0.6068,
"step": 1963
},
{
"epoch": 1.7365163572060123,
"grad_norm": 0.24519343674182892,
"learning_rate": 1.2233025445699045e-05,
"loss": 0.6293,
"step": 1964
},
{
"epoch": 1.7374005305039788,
"grad_norm": 0.25860780477523804,
"learning_rate": 1.2226186431163375e-05,
"loss": 0.6248,
"step": 1965
},
{
"epoch": 1.7382847038019453,
"grad_norm": 0.25678515434265137,
"learning_rate": 1.2219346320924125e-05,
"loss": 0.617,
"step": 1966
},
{
"epoch": 1.7391688770999116,
"grad_norm": 0.26241329312324524,
"learning_rate": 1.2212505118347924e-05,
"loss": 0.6212,
"step": 1967
},
{
"epoch": 1.740053050397878,
"grad_norm": 0.24439263343811035,
"learning_rate": 1.2205662826801925e-05,
"loss": 0.6167,
"step": 1968
},
{
"epoch": 1.7409372236958442,
"grad_norm": 0.25870800018310547,
"learning_rate": 1.2198819449653833e-05,
"loss": 0.6136,
"step": 1969
},
{
"epoch": 1.7418213969938108,
"grad_norm": 0.24154479801654816,
"learning_rate": 1.219197499027188e-05,
"loss": 0.6029,
"step": 1970
},
{
"epoch": 1.7427055702917773,
"grad_norm": 0.2538394033908844,
"learning_rate": 1.2185129452024825e-05,
"loss": 0.6299,
"step": 1971
},
{
"epoch": 1.7435897435897436,
"grad_norm": 0.25859585404396057,
"learning_rate": 1.217828283828197e-05,
"loss": 0.6062,
"step": 1972
},
{
"epoch": 1.74447391688771,
"grad_norm": 0.2737211287021637,
"learning_rate": 1.2171435152413136e-05,
"loss": 0.6346,
"step": 1973
},
{
"epoch": 1.7453580901856764,
"grad_norm": 0.2591975927352905,
"learning_rate": 1.2164586397788678e-05,
"loss": 0.6295,
"step": 1974
},
{
"epoch": 1.7462422634836428,
"grad_norm": 0.24078160524368286,
"learning_rate": 1.2157736577779472e-05,
"loss": 0.5875,
"step": 1975
},
{
"epoch": 1.7471264367816093,
"grad_norm": 0.2600054442882538,
"learning_rate": 1.2150885695756927e-05,
"loss": 0.6014,
"step": 1976
},
{
"epoch": 1.7480106100795756,
"grad_norm": 0.2559686303138733,
"learning_rate": 1.2144033755092965e-05,
"loss": 0.6198,
"step": 1977
},
{
"epoch": 1.748894783377542,
"grad_norm": 0.24742116034030914,
"learning_rate": 1.2137180759160035e-05,
"loss": 0.6274,
"step": 1978
},
{
"epoch": 1.7497789566755084,
"grad_norm": 0.2749579846858978,
"learning_rate": 1.2130326711331102e-05,
"loss": 0.6315,
"step": 1979
},
{
"epoch": 1.750663129973475,
"grad_norm": 0.2606034278869629,
"learning_rate": 1.212347161497965e-05,
"loss": 0.6213,
"step": 1980
},
{
"epoch": 1.7515473032714413,
"grad_norm": 0.2673099637031555,
"learning_rate": 1.211661547347968e-05,
"loss": 0.6128,
"step": 1981
},
{
"epoch": 1.7524314765694076,
"grad_norm": 0.2540912330150604,
"learning_rate": 1.2109758290205704e-05,
"loss": 0.6208,
"step": 1982
},
{
"epoch": 1.7533156498673739,
"grad_norm": 0.2522587776184082,
"learning_rate": 1.2102900068532757e-05,
"loss": 0.6136,
"step": 1983
},
{
"epoch": 1.7541998231653404,
"grad_norm": 0.25229498744010925,
"learning_rate": 1.2096040811836369e-05,
"loss": 0.6111,
"step": 1984
},
{
"epoch": 1.755083996463307,
"grad_norm": 0.2528427541255951,
"learning_rate": 1.2089180523492592e-05,
"loss": 0.6093,
"step": 1985
},
{
"epoch": 1.7559681697612732,
"grad_norm": 0.2393440157175064,
"learning_rate": 1.2082319206877982e-05,
"loss": 0.5893,
"step": 1986
},
{
"epoch": 1.7568523430592395,
"grad_norm": 0.2507714629173279,
"learning_rate": 1.2075456865369599e-05,
"loss": 0.5982,
"step": 1987
},
{
"epoch": 1.7577365163572058,
"grad_norm": 0.2586420178413391,
"learning_rate": 1.2068593502345011e-05,
"loss": 0.6092,
"step": 1988
},
{
"epoch": 1.7586206896551724,
"grad_norm": 0.25351208448410034,
"learning_rate": 1.2061729121182284e-05,
"loss": 0.6051,
"step": 1989
},
{
"epoch": 1.759504862953139,
"grad_norm": 0.25788959860801697,
"learning_rate": 1.2054863725259995e-05,
"loss": 0.6161,
"step": 1990
},
{
"epoch": 1.7603890362511052,
"grad_norm": 0.2491181492805481,
"learning_rate": 1.2047997317957206e-05,
"loss": 0.6057,
"step": 1991
},
{
"epoch": 1.7612732095490715,
"grad_norm": 0.23578540980815887,
"learning_rate": 1.2041129902653488e-05,
"loss": 0.5975,
"step": 1992
},
{
"epoch": 1.762157382847038,
"grad_norm": 0.25960808992385864,
"learning_rate": 1.2034261482728904e-05,
"loss": 0.6166,
"step": 1993
},
{
"epoch": 1.7630415561450046,
"grad_norm": 0.24058817327022552,
"learning_rate": 1.2027392061564012e-05,
"loss": 0.5818,
"step": 1994
},
{
"epoch": 1.7639257294429709,
"grad_norm": 0.260195255279541,
"learning_rate": 1.2020521642539858e-05,
"loss": 0.6198,
"step": 1995
},
{
"epoch": 1.7648099027409372,
"grad_norm": 0.2536511719226837,
"learning_rate": 1.2013650229037997e-05,
"loss": 0.6034,
"step": 1996
},
{
"epoch": 1.7656940760389035,
"grad_norm": 0.2564987242221832,
"learning_rate": 1.2006777824440443e-05,
"loss": 0.6084,
"step": 1997
},
{
"epoch": 1.76657824933687,
"grad_norm": 0.25996366143226624,
"learning_rate": 1.1999904432129729e-05,
"loss": 0.6236,
"step": 1998
},
{
"epoch": 1.7674624226348365,
"grad_norm": 0.24751466512680054,
"learning_rate": 1.199303005548886e-05,
"loss": 0.5851,
"step": 1999
},
{
"epoch": 1.7683465959328029,
"grad_norm": 0.25864890217781067,
"learning_rate": 1.1986154697901313e-05,
"loss": 0.6028,
"step": 2000
},
{
"epoch": 1.7683465959328029,
"eval_loss": 0.562258780002594,
"eval_runtime": 635.8032,
"eval_samples_per_second": 9.487,
"eval_steps_per_second": 1.186,
"step": 2000
},
{
"epoch": 1.7692307692307692,
"grad_norm": 0.2799210548400879,
"learning_rate": 1.1979278362751072e-05,
"loss": 0.6166,
"step": 2001
},
{
"epoch": 1.7701149425287355,
"grad_norm": 0.2537185847759247,
"learning_rate": 1.1972401053422587e-05,
"loss": 0.6253,
"step": 2002
},
{
"epoch": 1.770999115826702,
"grad_norm": 0.2558802664279938,
"learning_rate": 1.1965522773300794e-05,
"loss": 0.5981,
"step": 2003
},
{
"epoch": 1.7718832891246685,
"grad_norm": 0.2514832615852356,
"learning_rate": 1.19586435257711e-05,
"loss": 0.5999,
"step": 2004
},
{
"epoch": 1.7727674624226348,
"grad_norm": 0.24692074954509735,
"learning_rate": 1.1951763314219393e-05,
"loss": 0.6129,
"step": 2005
},
{
"epoch": 1.7736516357206011,
"grad_norm": 0.2631712555885315,
"learning_rate": 1.1944882142032037e-05,
"loss": 0.6043,
"step": 2006
},
{
"epoch": 1.7745358090185677,
"grad_norm": 0.24620743095874786,
"learning_rate": 1.1938000012595868e-05,
"loss": 0.6118,
"step": 2007
},
{
"epoch": 1.7754199823165342,
"grad_norm": 0.261752724647522,
"learning_rate": 1.1931116929298185e-05,
"loss": 0.6377,
"step": 2008
},
{
"epoch": 1.7763041556145005,
"grad_norm": 0.26028209924697876,
"learning_rate": 1.1924232895526769e-05,
"loss": 0.6123,
"step": 2009
},
{
"epoch": 1.7771883289124668,
"grad_norm": 0.3921703100204468,
"learning_rate": 1.1917347914669862e-05,
"loss": 0.6494,
"step": 2010
},
{
"epoch": 1.7780725022104331,
"grad_norm": 0.25247621536254883,
"learning_rate": 1.1910461990116167e-05,
"loss": 0.5903,
"step": 2011
},
{
"epoch": 1.7789566755083996,
"grad_norm": 0.25848114490509033,
"learning_rate": 1.1903575125254867e-05,
"loss": 0.6278,
"step": 2012
},
{
"epoch": 1.7798408488063662,
"grad_norm": 0.262572705745697,
"learning_rate": 1.1896687323475593e-05,
"loss": 0.6079,
"step": 2013
},
{
"epoch": 1.7807250221043325,
"grad_norm": 0.24781905114650726,
"learning_rate": 1.1889798588168444e-05,
"loss": 0.6201,
"step": 2014
},
{
"epoch": 1.7816091954022988,
"grad_norm": 0.25110140442848206,
"learning_rate": 1.1882908922723977e-05,
"loss": 0.5973,
"step": 2015
},
{
"epoch": 1.782493368700265,
"grad_norm": 0.2504766285419464,
"learning_rate": 1.1876018330533205e-05,
"loss": 0.6139,
"step": 2016
},
{
"epoch": 1.7833775419982316,
"grad_norm": 0.2606179714202881,
"learning_rate": 1.1869126814987601e-05,
"loss": 0.6575,
"step": 2017
},
{
"epoch": 1.7842617152961981,
"grad_norm": 0.2579403519630432,
"learning_rate": 1.1862234379479088e-05,
"loss": 0.6032,
"step": 2018
},
{
"epoch": 1.7851458885941645,
"grad_norm": 0.2512941062450409,
"learning_rate": 1.1855341027400049e-05,
"loss": 0.626,
"step": 2019
},
{
"epoch": 1.7860300618921308,
"grad_norm": 0.3081704378128052,
"learning_rate": 1.1848446762143304e-05,
"loss": 0.594,
"step": 2020
},
{
"epoch": 1.7869142351900973,
"grad_norm": 0.2567075490951538,
"learning_rate": 1.1841551587102142e-05,
"loss": 0.5817,
"step": 2021
},
{
"epoch": 1.7877984084880638,
"grad_norm": 0.25040769577026367,
"learning_rate": 1.1834655505670285e-05,
"loss": 0.608,
"step": 2022
},
{
"epoch": 1.7886825817860301,
"grad_norm": 0.24940884113311768,
"learning_rate": 1.1827758521241908e-05,
"loss": 0.6001,
"step": 2023
},
{
"epoch": 1.7895667550839964,
"grad_norm": 0.2564821243286133,
"learning_rate": 1.182086063721162e-05,
"loss": 0.6013,
"step": 2024
},
{
"epoch": 1.7904509283819627,
"grad_norm": 0.24460622668266296,
"learning_rate": 1.1813961856974492e-05,
"loss": 0.6046,
"step": 2025
},
{
"epoch": 1.7913351016799293,
"grad_norm": 0.25175201892852783,
"learning_rate": 1.1807062183926022e-05,
"loss": 0.6187,
"step": 2026
},
{
"epoch": 1.7922192749778958,
"grad_norm": 0.2521284520626068,
"learning_rate": 1.180016162146214e-05,
"loss": 0.6006,
"step": 2027
},
{
"epoch": 1.793103448275862,
"grad_norm": 0.25127822160720825,
"learning_rate": 1.1793260172979239e-05,
"loss": 0.6248,
"step": 2028
},
{
"epoch": 1.7939876215738284,
"grad_norm": 0.2655341923236847,
"learning_rate": 1.1786357841874125e-05,
"loss": 0.6109,
"step": 2029
},
{
"epoch": 1.7948717948717947,
"grad_norm": 0.25561514496803284,
"learning_rate": 1.177945463154405e-05,
"loss": 0.6183,
"step": 2030
},
{
"epoch": 1.7957559681697612,
"grad_norm": 0.23451489210128784,
"learning_rate": 1.177255054538669e-05,
"loss": 0.5835,
"step": 2031
},
{
"epoch": 1.7966401414677278,
"grad_norm": 0.24993616342544556,
"learning_rate": 1.1765645586800165e-05,
"loss": 0.6063,
"step": 2032
},
{
"epoch": 1.797524314765694,
"grad_norm": 0.24009659886360168,
"learning_rate": 1.1758739759183011e-05,
"loss": 0.6168,
"step": 2033
},
{
"epoch": 1.7984084880636604,
"grad_norm": 0.24056893587112427,
"learning_rate": 1.17518330659342e-05,
"loss": 0.5838,
"step": 2034
},
{
"epoch": 1.799292661361627,
"grad_norm": 0.25318005681037903,
"learning_rate": 1.1744925510453128e-05,
"loss": 0.5959,
"step": 2035
},
{
"epoch": 1.8001768346595934,
"grad_norm": 0.24396154284477234,
"learning_rate": 1.1738017096139615e-05,
"loss": 0.6044,
"step": 2036
},
{
"epoch": 1.8010610079575597,
"grad_norm": 0.24170322716236115,
"learning_rate": 1.1731107826393903e-05,
"loss": 0.6302,
"step": 2037
},
{
"epoch": 1.801945181255526,
"grad_norm": 0.256404846906662,
"learning_rate": 1.1724197704616653e-05,
"loss": 0.6299,
"step": 2038
},
{
"epoch": 1.8028293545534924,
"grad_norm": 0.23811738193035126,
"learning_rate": 1.1717286734208957e-05,
"loss": 0.6147,
"step": 2039
},
{
"epoch": 1.8037135278514589,
"grad_norm": 0.2353544980287552,
"learning_rate": 1.1710374918572308e-05,
"loss": 0.5992,
"step": 2040
},
{
"epoch": 1.8045977011494254,
"grad_norm": 0.2557573914527893,
"learning_rate": 1.1703462261108623e-05,
"loss": 0.6363,
"step": 2041
},
{
"epoch": 1.8054818744473917,
"grad_norm": 0.24552226066589355,
"learning_rate": 1.1696548765220237e-05,
"loss": 0.6082,
"step": 2042
},
{
"epoch": 1.806366047745358,
"grad_norm": 0.23837116360664368,
"learning_rate": 1.1689634434309891e-05,
"loss": 0.5963,
"step": 2043
},
{
"epoch": 1.8072502210433243,
"grad_norm": 0.2540528178215027,
"learning_rate": 1.1682719271780741e-05,
"loss": 0.6064,
"step": 2044
},
{
"epoch": 1.8081343943412909,
"grad_norm": 0.23967784643173218,
"learning_rate": 1.1675803281036351e-05,
"loss": 0.5977,
"step": 2045
},
{
"epoch": 1.8090185676392574,
"grad_norm": 0.2566947042942047,
"learning_rate": 1.1668886465480693e-05,
"loss": 0.6194,
"step": 2046
},
{
"epoch": 1.8099027409372237,
"grad_norm": 0.23291194438934326,
"learning_rate": 1.1661968828518142e-05,
"loss": 0.6035,
"step": 2047
},
{
"epoch": 1.81078691423519,
"grad_norm": 0.24380168318748474,
"learning_rate": 1.1655050373553488e-05,
"loss": 0.62,
"step": 2048
},
{
"epoch": 1.8116710875331565,
"grad_norm": 0.2619140148162842,
"learning_rate": 1.1648131103991905e-05,
"loss": 0.6171,
"step": 2049
},
{
"epoch": 1.812555260831123,
"grad_norm": 0.2591189742088318,
"learning_rate": 1.1641211023238986e-05,
"loss": 0.6362,
"step": 2050
},
{
"epoch": 1.8134394341290894,
"grad_norm": 0.2395847737789154,
"learning_rate": 1.1634290134700713e-05,
"loss": 0.6033,
"step": 2051
},
{
"epoch": 1.8143236074270557,
"grad_norm": 0.24872010946273804,
"learning_rate": 1.1627368441783472e-05,
"loss": 0.5998,
"step": 2052
},
{
"epoch": 1.815207780725022,
"grad_norm": 0.23490813374519348,
"learning_rate": 1.1620445947894035e-05,
"loss": 0.6013,
"step": 2053
},
{
"epoch": 1.8160919540229885,
"grad_norm": 0.235739067196846,
"learning_rate": 1.1613522656439577e-05,
"loss": 0.5953,
"step": 2054
},
{
"epoch": 1.816976127320955,
"grad_norm": 0.2463558167219162,
"learning_rate": 1.160659857082767e-05,
"loss": 0.6125,
"step": 2055
},
{
"epoch": 1.8178603006189213,
"grad_norm": 0.24444468319416046,
"learning_rate": 1.1599673694466258e-05,
"loss": 0.5966,
"step": 2056
},
{
"epoch": 1.8187444739168876,
"grad_norm": 0.2572228014469147,
"learning_rate": 1.1592748030763694e-05,
"loss": 0.6226,
"step": 2057
},
{
"epoch": 1.819628647214854,
"grad_norm": 0.2584141492843628,
"learning_rate": 1.1585821583128709e-05,
"loss": 0.6303,
"step": 2058
},
{
"epoch": 1.8205128205128205,
"grad_norm": 0.2463539093732834,
"learning_rate": 1.1578894354970423e-05,
"loss": 0.615,
"step": 2059
},
{
"epoch": 1.821396993810787,
"grad_norm": 0.24145223200321198,
"learning_rate": 1.1571966349698334e-05,
"loss": 0.6057,
"step": 2060
},
{
"epoch": 1.8222811671087533,
"grad_norm": 0.2555166482925415,
"learning_rate": 1.1565037570722333e-05,
"loss": 0.6356,
"step": 2061
},
{
"epoch": 1.8231653404067196,
"grad_norm": 0.24166712164878845,
"learning_rate": 1.1558108021452684e-05,
"loss": 0.5891,
"step": 2062
},
{
"epoch": 1.8240495137046862,
"grad_norm": 0.2437223196029663,
"learning_rate": 1.155117770530003e-05,
"loss": 0.6156,
"step": 2063
},
{
"epoch": 1.8249336870026527,
"grad_norm": 0.24179872870445251,
"learning_rate": 1.1544246625675397e-05,
"loss": 0.5933,
"step": 2064
},
{
"epoch": 1.825817860300619,
"grad_norm": 0.2527466118335724,
"learning_rate": 1.1537314785990178e-05,
"loss": 0.6152,
"step": 2065
},
{
"epoch": 1.8267020335985853,
"grad_norm": 0.25272810459136963,
"learning_rate": 1.1530382189656155e-05,
"loss": 0.6122,
"step": 2066
},
{
"epoch": 1.8275862068965516,
"grad_norm": 0.24457749724388123,
"learning_rate": 1.1523448840085461e-05,
"loss": 0.5979,
"step": 2067
},
{
"epoch": 1.8284703801945181,
"grad_norm": 0.2396862655878067,
"learning_rate": 1.1516514740690621e-05,
"loss": 0.5919,
"step": 2068
},
{
"epoch": 1.8293545534924847,
"grad_norm": 0.25269728899002075,
"learning_rate": 1.1509579894884515e-05,
"loss": 0.6101,
"step": 2069
},
{
"epoch": 1.830238726790451,
"grad_norm": 0.2618043124675751,
"learning_rate": 1.1502644306080395e-05,
"loss": 0.6064,
"step": 2070
},
{
"epoch": 1.8311229000884173,
"grad_norm": 0.2405078113079071,
"learning_rate": 1.1495707977691884e-05,
"loss": 0.5939,
"step": 2071
},
{
"epoch": 1.8320070733863836,
"grad_norm": 0.24432392418384552,
"learning_rate": 1.1488770913132956e-05,
"loss": 0.6035,
"step": 2072
},
{
"epoch": 1.83289124668435,
"grad_norm": 0.24507810175418854,
"learning_rate": 1.148183311581796e-05,
"loss": 0.6021,
"step": 2073
},
{
"epoch": 1.8337754199823166,
"grad_norm": 0.24560154974460602,
"learning_rate": 1.1474894589161598e-05,
"loss": 0.6291,
"step": 2074
},
{
"epoch": 1.834659593280283,
"grad_norm": 0.2349531054496765,
"learning_rate": 1.146795533657894e-05,
"loss": 0.6048,
"step": 2075
},
{
"epoch": 1.8355437665782492,
"grad_norm": 0.24420738220214844,
"learning_rate": 1.1461015361485395e-05,
"loss": 0.6213,
"step": 2076
},
{
"epoch": 1.8364279398762158,
"grad_norm": 0.2590208649635315,
"learning_rate": 1.145407466729675e-05,
"loss": 0.616,
"step": 2077
},
{
"epoch": 1.8373121131741823,
"grad_norm": 0.2448689341545105,
"learning_rate": 1.1447133257429135e-05,
"loss": 0.6087,
"step": 2078
},
{
"epoch": 1.8381962864721486,
"grad_norm": 0.24580077826976776,
"learning_rate": 1.1440191135299029e-05,
"loss": 0.6079,
"step": 2079
},
{
"epoch": 1.839080459770115,
"grad_norm": 0.2409096360206604,
"learning_rate": 1.1433248304323265e-05,
"loss": 0.5692,
"step": 2080
},
{
"epoch": 1.8399646330680812,
"grad_norm": 0.2513170540332794,
"learning_rate": 1.142630476791903e-05,
"loss": 0.6208,
"step": 2081
},
{
"epoch": 1.8408488063660478,
"grad_norm": 0.24503926932811737,
"learning_rate": 1.1419360529503846e-05,
"loss": 0.6047,
"step": 2082
},
{
"epoch": 1.8417329796640143,
"grad_norm": 0.24885039031505585,
"learning_rate": 1.1412415592495593e-05,
"loss": 0.5989,
"step": 2083
},
{
"epoch": 1.8426171529619806,
"grad_norm": 0.24474181234836578,
"learning_rate": 1.1405469960312492e-05,
"loss": 0.6213,
"step": 2084
},
{
"epoch": 1.843501326259947,
"grad_norm": 0.2517824172973633,
"learning_rate": 1.1398523636373098e-05,
"loss": 0.6208,
"step": 2085
},
{
"epoch": 1.8443854995579132,
"grad_norm": 0.2442937046289444,
"learning_rate": 1.1391576624096317e-05,
"loss": 0.6031,
"step": 2086
},
{
"epoch": 1.8452696728558797,
"grad_norm": 0.24804413318634033,
"learning_rate": 1.1384628926901384e-05,
"loss": 0.6123,
"step": 2087
},
{
"epoch": 1.8461538461538463,
"grad_norm": 0.2324983924627304,
"learning_rate": 1.1377680548207887e-05,
"loss": 0.5934,
"step": 2088
},
{
"epoch": 1.8470380194518126,
"grad_norm": 0.2412562221288681,
"learning_rate": 1.1370731491435727e-05,
"loss": 0.606,
"step": 2089
},
{
"epoch": 1.8479221927497789,
"grad_norm": 0.24440523982048035,
"learning_rate": 1.1363781760005156e-05,
"loss": 0.6042,
"step": 2090
},
{
"epoch": 1.8488063660477454,
"grad_norm": 0.24042700231075287,
"learning_rate": 1.1356831357336754e-05,
"loss": 0.5965,
"step": 2091
},
{
"epoch": 1.849690539345712,
"grad_norm": 0.24741309881210327,
"learning_rate": 1.1349880286851424e-05,
"loss": 0.6015,
"step": 2092
},
{
"epoch": 1.8505747126436782,
"grad_norm": 0.25323837995529175,
"learning_rate": 1.1342928551970412e-05,
"loss": 0.5974,
"step": 2093
},
{
"epoch": 1.8514588859416445,
"grad_norm": 0.24051597714424133,
"learning_rate": 1.1335976156115276e-05,
"loss": 0.6032,
"step": 2094
},
{
"epoch": 1.8523430592396108,
"grad_norm": 0.25649482011795044,
"learning_rate": 1.1329023102707907e-05,
"loss": 0.6148,
"step": 2095
},
{
"epoch": 1.8532272325375774,
"grad_norm": 0.24064001441001892,
"learning_rate": 1.132206939517052e-05,
"loss": 0.6063,
"step": 2096
},
{
"epoch": 1.854111405835544,
"grad_norm": 0.2414870411157608,
"learning_rate": 1.131511503692565e-05,
"loss": 0.6047,
"step": 2097
},
{
"epoch": 1.8549955791335102,
"grad_norm": 0.25330039858818054,
"learning_rate": 1.1308160031396154e-05,
"loss": 0.617,
"step": 2098
},
{
"epoch": 1.8558797524314765,
"grad_norm": 0.2417076826095581,
"learning_rate": 1.1301204382005207e-05,
"loss": 0.6173,
"step": 2099
},
{
"epoch": 1.8567639257294428,
"grad_norm": 0.24101918935775757,
"learning_rate": 1.1294248092176296e-05,
"loss": 0.615,
"step": 2100
},
{
"epoch": 1.8576480990274093,
"grad_norm": 0.2720644772052765,
"learning_rate": 1.1287291165333233e-05,
"loss": 0.6467,
"step": 2101
},
{
"epoch": 1.8585322723253759,
"grad_norm": 0.23659172654151917,
"learning_rate": 1.1280333604900136e-05,
"loss": 0.6073,
"step": 2102
},
{
"epoch": 1.8594164456233422,
"grad_norm": 0.24826744198799133,
"learning_rate": 1.127337541430143e-05,
"loss": 0.5999,
"step": 2103
},
{
"epoch": 1.8603006189213085,
"grad_norm": 0.2405870258808136,
"learning_rate": 1.1266416596961874e-05,
"loss": 0.5911,
"step": 2104
},
{
"epoch": 1.861184792219275,
"grad_norm": 0.23560138046741486,
"learning_rate": 1.1259457156306498e-05,
"loss": 0.6122,
"step": 2105
},
{
"epoch": 1.8620689655172413,
"grad_norm": 0.24264702200889587,
"learning_rate": 1.1252497095760674e-05,
"loss": 0.606,
"step": 2106
},
{
"epoch": 1.8629531388152079,
"grad_norm": 0.25198376178741455,
"learning_rate": 1.1245536418750061e-05,
"loss": 0.6239,
"step": 2107
},
{
"epoch": 1.8638373121131742,
"grad_norm": 0.24715298414230347,
"learning_rate": 1.1238575128700621e-05,
"loss": 0.612,
"step": 2108
},
{
"epoch": 1.8647214854111405,
"grad_norm": 0.24022288620471954,
"learning_rate": 1.1231613229038627e-05,
"loss": 0.5964,
"step": 2109
},
{
"epoch": 1.865605658709107,
"grad_norm": 0.23906004428863525,
"learning_rate": 1.122465072319064e-05,
"loss": 0.5973,
"step": 2110
},
{
"epoch": 1.8664898320070735,
"grad_norm": 0.24333196878433228,
"learning_rate": 1.1217687614583534e-05,
"loss": 0.6114,
"step": 2111
},
{
"epoch": 1.8673740053050398,
"grad_norm": 0.25038817524909973,
"learning_rate": 1.1210723906644465e-05,
"loss": 0.6282,
"step": 2112
},
{
"epoch": 1.8682581786030061,
"grad_norm": 0.2572545111179352,
"learning_rate": 1.1203759602800894e-05,
"loss": 0.616,
"step": 2113
},
{
"epoch": 1.8691423519009724,
"grad_norm": 0.24151872098445892,
"learning_rate": 1.1196794706480572e-05,
"loss": 0.5969,
"step": 2114
},
{
"epoch": 1.870026525198939,
"grad_norm": 0.23897315561771393,
"learning_rate": 1.1189829221111539e-05,
"loss": 0.602,
"step": 2115
},
{
"epoch": 1.8709106984969055,
"grad_norm": 0.2581343948841095,
"learning_rate": 1.1182863150122127e-05,
"loss": 0.5914,
"step": 2116
},
{
"epoch": 1.8717948717948718,
"grad_norm": 0.2537383735179901,
"learning_rate": 1.1175896496940966e-05,
"loss": 0.6355,
"step": 2117
},
{
"epoch": 1.8726790450928381,
"grad_norm": 0.24117690324783325,
"learning_rate": 1.1168929264996951e-05,
"loss": 0.6235,
"step": 2118
},
{
"epoch": 1.8735632183908046,
"grad_norm": 0.2524166703224182,
"learning_rate": 1.1161961457719279e-05,
"loss": 0.6038,
"step": 2119
},
{
"epoch": 1.874447391688771,
"grad_norm": 0.2434546798467636,
"learning_rate": 1.115499307853743e-05,
"loss": 0.6162,
"step": 2120
},
{
"epoch": 1.8753315649867375,
"grad_norm": 0.24508021771907806,
"learning_rate": 1.1148024130881152e-05,
"loss": 0.6173,
"step": 2121
},
{
"epoch": 1.8762157382847038,
"grad_norm": 0.2517911195755005,
"learning_rate": 1.114105461818049e-05,
"loss": 0.6177,
"step": 2122
},
{
"epoch": 1.87709991158267,
"grad_norm": 0.24528320133686066,
"learning_rate": 1.113408454386575e-05,
"loss": 0.6239,
"step": 2123
},
{
"epoch": 1.8779840848806366,
"grad_norm": 0.26639533042907715,
"learning_rate": 1.112711391136753e-05,
"loss": 0.6034,
"step": 2124
},
{
"epoch": 1.8788682581786031,
"grad_norm": 0.24178391695022583,
"learning_rate": 1.1120142724116692e-05,
"loss": 0.5749,
"step": 2125
},
{
"epoch": 1.8797524314765695,
"grad_norm": 0.26185885071754456,
"learning_rate": 1.111317098554438e-05,
"loss": 0.6184,
"step": 2126
},
{
"epoch": 1.8806366047745358,
"grad_norm": 0.260879784822464,
"learning_rate": 1.1106198699081999e-05,
"loss": 0.6149,
"step": 2127
},
{
"epoch": 1.881520778072502,
"grad_norm": 0.2436174899339676,
"learning_rate": 1.1099225868161229e-05,
"loss": 0.5976,
"step": 2128
},
{
"epoch": 1.8824049513704686,
"grad_norm": 0.25896623730659485,
"learning_rate": 1.1092252496214022e-05,
"loss": 0.6091,
"step": 2129
},
{
"epoch": 1.8832891246684351,
"grad_norm": 0.2618313729763031,
"learning_rate": 1.108527858667259e-05,
"loss": 0.6037,
"step": 2130
},
{
"epoch": 1.8841732979664014,
"grad_norm": 0.2363477349281311,
"learning_rate": 1.107830414296941e-05,
"loss": 0.5939,
"step": 2131
},
{
"epoch": 1.8850574712643677,
"grad_norm": 0.2526490092277527,
"learning_rate": 1.1071329168537223e-05,
"loss": 0.6415,
"step": 2132
},
{
"epoch": 1.8859416445623343,
"grad_norm": 0.2663135230541229,
"learning_rate": 1.1064353666809037e-05,
"loss": 0.6197,
"step": 2133
},
{
"epoch": 1.8868258178603006,
"grad_norm": 0.2526358962059021,
"learning_rate": 1.1057377641218113e-05,
"loss": 0.5982,
"step": 2134
},
{
"epoch": 1.887709991158267,
"grad_norm": 0.2511078715324402,
"learning_rate": 1.1050401095197966e-05,
"loss": 0.616,
"step": 2135
},
{
"epoch": 1.8885941644562334,
"grad_norm": 0.2568417489528656,
"learning_rate": 1.1043424032182377e-05,
"loss": 0.6248,
"step": 2136
},
{
"epoch": 1.8894783377541997,
"grad_norm": 0.24939990043640137,
"learning_rate": 1.1036446455605376e-05,
"loss": 0.6191,
"step": 2137
},
{
"epoch": 1.8903625110521662,
"grad_norm": 0.2533826231956482,
"learning_rate": 1.1029468368901249e-05,
"loss": 0.612,
"step": 2138
},
{
"epoch": 1.8912466843501328,
"grad_norm": 0.24376453459262848,
"learning_rate": 1.1022489775504523e-05,
"loss": 0.5987,
"step": 2139
},
{
"epoch": 1.892130857648099,
"grad_norm": 0.26400092244148254,
"learning_rate": 1.1015510678849994e-05,
"loss": 0.5884,
"step": 2140
},
{
"epoch": 1.8930150309460654,
"grad_norm": 0.24996677041053772,
"learning_rate": 1.1008531082372681e-05,
"loss": 0.5863,
"step": 2141
},
{
"epoch": 1.8938992042440317,
"grad_norm": 0.25784632563591003,
"learning_rate": 1.1001550989507875e-05,
"loss": 0.6199,
"step": 2142
},
{
"epoch": 1.8947833775419982,
"grad_norm": 0.2422391027212143,
"learning_rate": 1.0994570403691086e-05,
"loss": 0.6072,
"step": 2143
},
{
"epoch": 1.8956675508399647,
"grad_norm": 0.2514199912548065,
"learning_rate": 1.0987589328358086e-05,
"loss": 0.6004,
"step": 2144
},
{
"epoch": 1.896551724137931,
"grad_norm": 0.25177592039108276,
"learning_rate": 1.0980607766944878e-05,
"loss": 0.6223,
"step": 2145
},
{
"epoch": 1.8974358974358974,
"grad_norm": 0.25119173526763916,
"learning_rate": 1.0973625722887703e-05,
"loss": 0.5803,
"step": 2146
},
{
"epoch": 1.8983200707338639,
"grad_norm": 0.2429027259349823,
"learning_rate": 1.0966643199623057e-05,
"loss": 0.6144,
"step": 2147
},
{
"epoch": 1.8992042440318302,
"grad_norm": 0.26296576857566833,
"learning_rate": 1.0959660200587643e-05,
"loss": 0.6067,
"step": 2148
},
{
"epoch": 1.9000884173297967,
"grad_norm": 0.2495930790901184,
"learning_rate": 1.0952676729218422e-05,
"loss": 0.6348,
"step": 2149
},
{
"epoch": 1.900972590627763,
"grad_norm": 0.2578394412994385,
"learning_rate": 1.0945692788952575e-05,
"loss": 0.6292,
"step": 2150
},
{
"epoch": 1.9018567639257293,
"grad_norm": 0.2476021945476532,
"learning_rate": 1.093870838322752e-05,
"loss": 0.6133,
"step": 2151
},
{
"epoch": 1.9027409372236959,
"grad_norm": 0.26183298230171204,
"learning_rate": 1.0931723515480901e-05,
"loss": 0.6127,
"step": 2152
},
{
"epoch": 1.9036251105216624,
"grad_norm": 0.2544593811035156,
"learning_rate": 1.0924738189150595e-05,
"loss": 0.6051,
"step": 2153
},
{
"epoch": 1.9045092838196287,
"grad_norm": 0.2475515455007553,
"learning_rate": 1.091775240767469e-05,
"loss": 0.6197,
"step": 2154
},
{
"epoch": 1.905393457117595,
"grad_norm": 0.263184130191803,
"learning_rate": 1.0910766174491519e-05,
"loss": 0.5812,
"step": 2155
},
{
"epoch": 1.9062776304155613,
"grad_norm": 0.2499268800020218,
"learning_rate": 1.090377949303962e-05,
"loss": 0.6198,
"step": 2156
},
{
"epoch": 1.9071618037135278,
"grad_norm": 0.2741779386997223,
"learning_rate": 1.0896792366757757e-05,
"loss": 0.6248,
"step": 2157
},
{
"epoch": 1.9080459770114944,
"grad_norm": 0.2475203275680542,
"learning_rate": 1.0889804799084916e-05,
"loss": 0.5969,
"step": 2158
},
{
"epoch": 1.9089301503094607,
"grad_norm": 0.2527630925178528,
"learning_rate": 1.0882816793460296e-05,
"loss": 0.6154,
"step": 2159
},
{
"epoch": 1.909814323607427,
"grad_norm": 0.2516087293624878,
"learning_rate": 1.0875828353323321e-05,
"loss": 0.6329,
"step": 2160
},
{
"epoch": 1.9106984969053935,
"grad_norm": 0.2411414384841919,
"learning_rate": 1.0868839482113612e-05,
"loss": 0.5963,
"step": 2161
},
{
"epoch": 1.9115826702033598,
"grad_norm": 0.2573741376399994,
"learning_rate": 1.0861850183271016e-05,
"loss": 0.6131,
"step": 2162
},
{
"epoch": 1.9124668435013263,
"grad_norm": 0.23459602892398834,
"learning_rate": 1.0854860460235588e-05,
"loss": 0.5853,
"step": 2163
},
{
"epoch": 1.9133510167992926,
"grad_norm": 0.27649685740470886,
"learning_rate": 1.0847870316447589e-05,
"loss": 0.6678,
"step": 2164
},
{
"epoch": 1.914235190097259,
"grad_norm": 0.2748173475265503,
"learning_rate": 1.0840879755347484e-05,
"loss": 0.627,
"step": 2165
},
{
"epoch": 1.9151193633952255,
"grad_norm": 0.2539891004562378,
"learning_rate": 1.0833888780375954e-05,
"loss": 0.5963,
"step": 2166
},
{
"epoch": 1.916003536693192,
"grad_norm": 0.24686411023139954,
"learning_rate": 1.0826897394973872e-05,
"loss": 0.6089,
"step": 2167
},
{
"epoch": 1.9168877099911583,
"grad_norm": 0.24498027563095093,
"learning_rate": 1.0819905602582318e-05,
"loss": 0.6068,
"step": 2168
},
{
"epoch": 1.9177718832891246,
"grad_norm": 0.25686606764793396,
"learning_rate": 1.081291340664258e-05,
"loss": 0.6203,
"step": 2169
},
{
"epoch": 1.918656056587091,
"grad_norm": 0.2448924332857132,
"learning_rate": 1.0805920810596127e-05,
"loss": 0.613,
"step": 2170
},
{
"epoch": 1.9195402298850575,
"grad_norm": 0.23723550140857697,
"learning_rate": 1.0798927817884644e-05,
"loss": 0.6173,
"step": 2171
},
{
"epoch": 1.920424403183024,
"grad_norm": 0.25689834356307983,
"learning_rate": 1.0791934431949995e-05,
"loss": 0.6136,
"step": 2172
},
{
"epoch": 1.9213085764809903,
"grad_norm": 0.2391226440668106,
"learning_rate": 1.0784940656234248e-05,
"loss": 0.5896,
"step": 2173
},
{
"epoch": 1.9221927497789566,
"grad_norm": 0.2465284764766693,
"learning_rate": 1.077794649417966e-05,
"loss": 0.6136,
"step": 2174
},
{
"epoch": 1.9230769230769231,
"grad_norm": 0.23603248596191406,
"learning_rate": 1.0770951949228677e-05,
"loss": 0.6019,
"step": 2175
},
{
"epoch": 1.9239610963748894,
"grad_norm": 0.251782089471817,
"learning_rate": 1.0763957024823937e-05,
"loss": 0.6195,
"step": 2176
},
{
"epoch": 1.924845269672856,
"grad_norm": 0.24139857292175293,
"learning_rate": 1.0756961724408254e-05,
"loss": 0.6279,
"step": 2177
},
{
"epoch": 1.9257294429708223,
"grad_norm": 0.2380710393190384,
"learning_rate": 1.0749966051424645e-05,
"loss": 0.6148,
"step": 2178
},
{
"epoch": 1.9266136162687886,
"grad_norm": 0.25297775864601135,
"learning_rate": 1.0742970009316295e-05,
"loss": 0.6161,
"step": 2179
},
{
"epoch": 1.927497789566755,
"grad_norm": 0.24412831664085388,
"learning_rate": 1.073597360152658e-05,
"loss": 0.6012,
"step": 2180
},
{
"epoch": 1.9283819628647216,
"grad_norm": 0.25303107500076294,
"learning_rate": 1.0728976831499045e-05,
"loss": 0.613,
"step": 2181
},
{
"epoch": 1.929266136162688,
"grad_norm": 0.25016266107559204,
"learning_rate": 1.0721979702677428e-05,
"loss": 0.6166,
"step": 2182
},
{
"epoch": 1.9301503094606542,
"grad_norm": 0.23695309460163116,
"learning_rate": 1.0714982218505632e-05,
"loss": 0.5891,
"step": 2183
},
{
"epoch": 1.9310344827586206,
"grad_norm": 0.24676333367824554,
"learning_rate": 1.0707984382427742e-05,
"loss": 0.6171,
"step": 2184
},
{
"epoch": 1.931918656056587,
"grad_norm": 0.24085931479930878,
"learning_rate": 1.0700986197888008e-05,
"loss": 0.5892,
"step": 2185
},
{
"epoch": 1.9328028293545536,
"grad_norm": 0.24577569961547852,
"learning_rate": 1.0693987668330861e-05,
"loss": 0.6155,
"step": 2186
},
{
"epoch": 1.93368700265252,
"grad_norm": 0.23324774205684662,
"learning_rate": 1.0686988797200896e-05,
"loss": 0.6008,
"step": 2187
},
{
"epoch": 1.9345711759504862,
"grad_norm": 0.2533464729785919,
"learning_rate": 1.0679989587942876e-05,
"loss": 0.6385,
"step": 2188
},
{
"epoch": 1.9354553492484527,
"grad_norm": 0.24033434689044952,
"learning_rate": 1.0672990044001739e-05,
"loss": 0.5979,
"step": 2189
},
{
"epoch": 1.936339522546419,
"grad_norm": 0.23374491930007935,
"learning_rate": 1.066599016882257e-05,
"loss": 0.5995,
"step": 2190
},
{
"epoch": 1.9372236958443856,
"grad_norm": 0.24092251062393188,
"learning_rate": 1.0658989965850635e-05,
"loss": 0.5998,
"step": 2191
},
{
"epoch": 1.938107869142352,
"grad_norm": 0.24086640775203705,
"learning_rate": 1.0651989438531355e-05,
"loss": 0.6201,
"step": 2192
},
{
"epoch": 1.9389920424403182,
"grad_norm": 0.23128457367420197,
"learning_rate": 1.0644988590310303e-05,
"loss": 0.5773,
"step": 2193
},
{
"epoch": 1.9398762157382847,
"grad_norm": 0.2386971265077591,
"learning_rate": 1.0637987424633222e-05,
"loss": 0.5801,
"step": 2194
},
{
"epoch": 1.9407603890362513,
"grad_norm": 0.23755161464214325,
"learning_rate": 1.0630985944946005e-05,
"loss": 0.5957,
"step": 2195
},
{
"epoch": 1.9416445623342176,
"grad_norm": 0.23651912808418274,
"learning_rate": 1.0623984154694703e-05,
"loss": 0.5946,
"step": 2196
},
{
"epoch": 1.9425287356321839,
"grad_norm": 0.24239900708198547,
"learning_rate": 1.0616982057325507e-05,
"loss": 0.6293,
"step": 2197
},
{
"epoch": 1.9434129089301502,
"grad_norm": 0.2472626268863678,
"learning_rate": 1.0609979656284781e-05,
"loss": 0.6034,
"step": 2198
},
{
"epoch": 1.9442970822281167,
"grad_norm": 0.23711003363132477,
"learning_rate": 1.0602976955019024e-05,
"loss": 0.5876,
"step": 2199
},
{
"epoch": 1.9451812555260832,
"grad_norm": 0.243400439620018,
"learning_rate": 1.0595973956974886e-05,
"loss": 0.6055,
"step": 2200
},
{
"epoch": 1.9460654288240495,
"grad_norm": 0.24460351467132568,
"learning_rate": 1.0588970665599158e-05,
"loss": 0.6017,
"step": 2201
},
{
"epoch": 1.9469496021220158,
"grad_norm": 0.2484050840139389,
"learning_rate": 1.0581967084338785e-05,
"loss": 0.6297,
"step": 2202
},
{
"epoch": 1.9478337754199824,
"grad_norm": 0.25774282217025757,
"learning_rate": 1.0574963216640848e-05,
"loss": 0.6022,
"step": 2203
},
{
"epoch": 1.9487179487179487,
"grad_norm": 0.24453866481781006,
"learning_rate": 1.056795906595257e-05,
"loss": 0.6293,
"step": 2204
},
{
"epoch": 1.9496021220159152,
"grad_norm": 0.24541209638118744,
"learning_rate": 1.0560954635721319e-05,
"loss": 0.604,
"step": 2205
},
{
"epoch": 1.9504862953138815,
"grad_norm": 0.2463800609111786,
"learning_rate": 1.0553949929394587e-05,
"loss": 0.5869,
"step": 2206
},
{
"epoch": 1.9513704686118478,
"grad_norm": 0.2467951625585556,
"learning_rate": 1.054694495042002e-05,
"loss": 0.6071,
"step": 2207
},
{
"epoch": 1.9522546419098143,
"grad_norm": 0.2561027407646179,
"learning_rate": 1.053993970224538e-05,
"loss": 0.6041,
"step": 2208
},
{
"epoch": 1.9531388152077809,
"grad_norm": 0.2361050695180893,
"learning_rate": 1.053293418831858e-05,
"loss": 0.5979,
"step": 2209
},
{
"epoch": 1.9540229885057472,
"grad_norm": 0.2632717788219452,
"learning_rate": 1.0525928412087643e-05,
"loss": 0.6104,
"step": 2210
},
{
"epoch": 1.9549071618037135,
"grad_norm": 0.24181409180164337,
"learning_rate": 1.0518922377000742e-05,
"loss": 0.5945,
"step": 2211
},
{
"epoch": 1.9557913351016798,
"grad_norm": 0.2597675323486328,
"learning_rate": 1.0511916086506163e-05,
"loss": 0.6172,
"step": 2212
},
{
"epoch": 1.9566755083996463,
"grad_norm": 0.2529585659503937,
"learning_rate": 1.0504909544052321e-05,
"loss": 0.5883,
"step": 2213
},
{
"epoch": 1.9575596816976129,
"grad_norm": 0.23550425469875336,
"learning_rate": 1.0497902753087761e-05,
"loss": 0.6069,
"step": 2214
},
{
"epoch": 1.9584438549955792,
"grad_norm": 0.2478284388780594,
"learning_rate": 1.0490895717061143e-05,
"loss": 0.5951,
"step": 2215
},
{
"epoch": 1.9593280282935455,
"grad_norm": 0.24016354978084564,
"learning_rate": 1.0483888439421249e-05,
"loss": 0.6088,
"step": 2216
},
{
"epoch": 1.9602122015915118,
"grad_norm": 0.23573939502239227,
"learning_rate": 1.0476880923616979e-05,
"loss": 0.6043,
"step": 2217
},
{
"epoch": 1.9610963748894783,
"grad_norm": 0.24266421794891357,
"learning_rate": 1.046987317309736e-05,
"loss": 0.6244,
"step": 2218
},
{
"epoch": 1.9619805481874448,
"grad_norm": 0.24119625985622406,
"learning_rate": 1.0462865191311517e-05,
"loss": 0.6091,
"step": 2219
},
{
"epoch": 1.9628647214854111,
"grad_norm": 0.25130897760391235,
"learning_rate": 1.045585698170871e-05,
"loss": 0.6137,
"step": 2220
},
{
"epoch": 1.9637488947833774,
"grad_norm": 0.23746778070926666,
"learning_rate": 1.044884854773829e-05,
"loss": 0.6002,
"step": 2221
},
{
"epoch": 1.964633068081344,
"grad_norm": 0.24798300862312317,
"learning_rate": 1.0441839892849734e-05,
"loss": 0.6046,
"step": 2222
},
{
"epoch": 1.9655172413793105,
"grad_norm": 0.24753214418888092,
"learning_rate": 1.0434831020492622e-05,
"loss": 0.6259,
"step": 2223
},
{
"epoch": 1.9664014146772768,
"grad_norm": 0.2427685558795929,
"learning_rate": 1.0427821934116636e-05,
"loss": 0.5911,
"step": 2224
},
{
"epoch": 1.967285587975243,
"grad_norm": 0.24075506627559662,
"learning_rate": 1.0420812637171576e-05,
"loss": 0.5956,
"step": 2225
},
{
"epoch": 1.9681697612732094,
"grad_norm": 0.25711631774902344,
"learning_rate": 1.041380313310733e-05,
"loss": 0.6351,
"step": 2226
},
{
"epoch": 1.969053934571176,
"grad_norm": 0.23846381902694702,
"learning_rate": 1.0406793425373903e-05,
"loss": 0.6121,
"step": 2227
},
{
"epoch": 1.9699381078691425,
"grad_norm": 0.23850500583648682,
"learning_rate": 1.0399783517421389e-05,
"loss": 0.6174,
"step": 2228
},
{
"epoch": 1.9708222811671088,
"grad_norm": 0.23999352753162384,
"learning_rate": 1.0392773412699986e-05,
"loss": 0.6195,
"step": 2229
},
{
"epoch": 1.971706454465075,
"grad_norm": 0.23970669507980347,
"learning_rate": 1.0385763114659986e-05,
"loss": 0.6078,
"step": 2230
},
{
"epoch": 1.9725906277630414,
"grad_norm": 0.24034547805786133,
"learning_rate": 1.0378752626751779e-05,
"loss": 0.6056,
"step": 2231
},
{
"epoch": 1.973474801061008,
"grad_norm": 0.2518119812011719,
"learning_rate": 1.0371741952425852e-05,
"loss": 0.6189,
"step": 2232
},
{
"epoch": 1.9743589743589745,
"grad_norm": 0.24527254700660706,
"learning_rate": 1.0364731095132768e-05,
"loss": 0.6192,
"step": 2233
},
{
"epoch": 1.9752431476569408,
"grad_norm": 0.23640985786914825,
"learning_rate": 1.03577200583232e-05,
"loss": 0.5946,
"step": 2234
},
{
"epoch": 1.976127320954907,
"grad_norm": 0.242376446723938,
"learning_rate": 1.0350708845447899e-05,
"loss": 0.6193,
"step": 2235
},
{
"epoch": 1.9770114942528736,
"grad_norm": 0.26663702726364136,
"learning_rate": 1.0343697459957702e-05,
"loss": 0.6322,
"step": 2236
},
{
"epoch": 1.9778956675508401,
"grad_norm": 0.2345614731311798,
"learning_rate": 1.0336685905303535e-05,
"loss": 0.5847,
"step": 2237
},
{
"epoch": 1.9787798408488064,
"grad_norm": 0.25089889764785767,
"learning_rate": 1.0329674184936404e-05,
"loss": 0.6247,
"step": 2238
},
{
"epoch": 1.9796640141467727,
"grad_norm": 0.24223099648952484,
"learning_rate": 1.0322662302307399e-05,
"loss": 0.6018,
"step": 2239
},
{
"epoch": 1.980548187444739,
"grad_norm": 0.2546231150627136,
"learning_rate": 1.0315650260867683e-05,
"loss": 0.6312,
"step": 2240
},
{
"epoch": 1.9814323607427056,
"grad_norm": 0.23767130076885223,
"learning_rate": 1.0308638064068512e-05,
"loss": 0.6063,
"step": 2241
},
{
"epoch": 1.982316534040672,
"grad_norm": 0.2504897713661194,
"learning_rate": 1.0301625715361201e-05,
"loss": 0.6094,
"step": 2242
},
{
"epoch": 1.9832007073386384,
"grad_norm": 0.233364999294281,
"learning_rate": 1.0294613218197156e-05,
"loss": 0.5979,
"step": 2243
},
{
"epoch": 1.9840848806366047,
"grad_norm": 0.24740083515644073,
"learning_rate": 1.0287600576027839e-05,
"loss": 0.6072,
"step": 2244
},
{
"epoch": 1.984969053934571,
"grad_norm": 0.24842876195907593,
"learning_rate": 1.0280587792304804e-05,
"loss": 0.6301,
"step": 2245
},
{
"epoch": 1.9858532272325375,
"grad_norm": 0.2382483333349228,
"learning_rate": 1.027357487047965e-05,
"loss": 0.5996,
"step": 2246
},
{
"epoch": 1.986737400530504,
"grad_norm": 0.24283906817436218,
"learning_rate": 1.0266561814004067e-05,
"loss": 0.5942,
"step": 2247
},
{
"epoch": 1.9876215738284704,
"grad_norm": 0.24749065935611725,
"learning_rate": 1.02595486263298e-05,
"loss": 0.6134,
"step": 2248
},
{
"epoch": 1.9885057471264367,
"grad_norm": 0.25264132022857666,
"learning_rate": 1.0252535310908657e-05,
"loss": 0.6316,
"step": 2249
},
{
"epoch": 1.9893899204244032,
"grad_norm": 0.24575389921665192,
"learning_rate": 1.0245521871192513e-05,
"loss": 0.6047,
"step": 2250
},
{
"epoch": 1.9902740937223697,
"grad_norm": 0.23502527177333832,
"learning_rate": 1.0238508310633306e-05,
"loss": 0.6103,
"step": 2251
},
{
"epoch": 1.991158267020336,
"grad_norm": 0.25971049070358276,
"learning_rate": 1.0231494632683026e-05,
"loss": 0.6005,
"step": 2252
},
{
"epoch": 1.9920424403183024,
"grad_norm": 0.25069931149482727,
"learning_rate": 1.0224480840793725e-05,
"loss": 0.6063,
"step": 2253
},
{
"epoch": 1.9929266136162687,
"grad_norm": 0.24305684864521027,
"learning_rate": 1.0217466938417518e-05,
"loss": 0.6283,
"step": 2254
},
{
"epoch": 1.9938107869142352,
"grad_norm": 0.25809407234191895,
"learning_rate": 1.0210452929006558e-05,
"loss": 0.6119,
"step": 2255
},
{
"epoch": 1.9946949602122017,
"grad_norm": 0.2345888614654541,
"learning_rate": 1.0203438816013068e-05,
"loss": 0.5848,
"step": 2256
},
{
"epoch": 1.995579133510168,
"grad_norm": 0.2608594000339508,
"learning_rate": 1.0196424602889312e-05,
"loss": 0.6126,
"step": 2257
},
{
"epoch": 1.9964633068081343,
"grad_norm": 0.24898038804531097,
"learning_rate": 1.0189410293087606e-05,
"loss": 0.6178,
"step": 2258
},
{
"epoch": 1.9973474801061006,
"grad_norm": 0.24251864850521088,
"learning_rate": 1.0182395890060317e-05,
"loss": 0.6261,
"step": 2259
},
{
"epoch": 1.9982316534040672,
"grad_norm": 0.2613992989063263,
"learning_rate": 1.0175381397259845e-05,
"loss": 0.6025,
"step": 2260
},
{
"epoch": 1.9991158267020337,
"grad_norm": 0.2566988468170166,
"learning_rate": 1.0168366818138654e-05,
"loss": 0.6179,
"step": 2261
},
{
"epoch": 2.0,
"grad_norm": 0.24998998641967773,
"learning_rate": 1.0161352156149229e-05,
"loss": 0.6225,
"step": 2262
}
],
"logging_steps": 1.0,
"max_steps": 4524,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.2958131815695201e+20,
"train_batch_size": 3,
"trial_name": null,
"trial_params": null
}