{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.4047412546978896,
"eval_steps": 500,
"global_step": 1400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0002891008962127783,
"grad_norm": 10.526118368353698,
"learning_rate": 5.714285714285715e-07,
"loss": 1.8059,
"step": 1
},
{
"epoch": 0.0005782017924255566,
"grad_norm": 10.546832449734852,
"learning_rate": 1.142857142857143e-06,
"loss": 1.8355,
"step": 2
},
{
"epoch": 0.0008673026886383347,
"grad_norm": 9.678077753237705,
"learning_rate": 1.7142857142857145e-06,
"loss": 1.8885,
"step": 3
},
{
"epoch": 0.0011564035848511131,
"grad_norm": 7.133178553946601,
"learning_rate": 2.285714285714286e-06,
"loss": 1.9111,
"step": 4
},
{
"epoch": 0.0014455044810638912,
"grad_norm": 4.135673123106036,
"learning_rate": 2.8571428571428573e-06,
"loss": 1.7868,
"step": 5
},
{
"epoch": 0.0017346053772766695,
"grad_norm": 3.0971146670050285,
"learning_rate": 3.428571428571429e-06,
"loss": 1.8928,
"step": 6
},
{
"epoch": 0.002023706273489448,
"grad_norm": 2.5830650058945004,
"learning_rate": 4.000000000000001e-06,
"loss": 1.8522,
"step": 7
},
{
"epoch": 0.0023128071697022263,
"grad_norm": 2.3055953581941364,
"learning_rate": 4.571428571428572e-06,
"loss": 1.7831,
"step": 8
},
{
"epoch": 0.0026019080659150044,
"grad_norm": 1.8810933130960161,
"learning_rate": 5.142857142857142e-06,
"loss": 1.8335,
"step": 9
},
{
"epoch": 0.0028910089621277824,
"grad_norm": 2.308546357873222,
"learning_rate": 5.7142857142857145e-06,
"loss": 1.7858,
"step": 10
},
{
"epoch": 0.003180109858340561,
"grad_norm": 1.9146380317808562,
"learning_rate": 6.285714285714286e-06,
"loss": 1.8005,
"step": 11
},
{
"epoch": 0.003469210754553339,
"grad_norm": 1.7733597852169312,
"learning_rate": 6.857142857142858e-06,
"loss": 1.7313,
"step": 12
},
{
"epoch": 0.0037583116507661175,
"grad_norm": 1.9163487932648622,
"learning_rate": 7.428571428571429e-06,
"loss": 1.8676,
"step": 13
},
{
"epoch": 0.004047412546978896,
"grad_norm": 1.81837480862002,
"learning_rate": 8.000000000000001e-06,
"loss": 1.7696,
"step": 14
},
{
"epoch": 0.004336513443191674,
"grad_norm": 1.7872590059217233,
"learning_rate": 8.571428571428571e-06,
"loss": 1.804,
"step": 15
},
{
"epoch": 0.004625614339404453,
"grad_norm": 1.875901273963478,
"learning_rate": 9.142857142857144e-06,
"loss": 1.7379,
"step": 16
},
{
"epoch": 0.004914715235617231,
"grad_norm": 1.6902352819283006,
"learning_rate": 9.714285714285715e-06,
"loss": 1.7908,
"step": 17
},
{
"epoch": 0.005203816131830009,
"grad_norm": 1.673145216450599,
"learning_rate": 1.0285714285714285e-05,
"loss": 1.8525,
"step": 18
},
{
"epoch": 0.005492917028042787,
"grad_norm": 1.6917810585273594,
"learning_rate": 1.0857142857142858e-05,
"loss": 1.7779,
"step": 19
},
{
"epoch": 0.005782017924255565,
"grad_norm": 1.5858495861042294,
"learning_rate": 1.1428571428571429e-05,
"loss": 1.6791,
"step": 20
},
{
"epoch": 0.006071118820468344,
"grad_norm": 2.004987370634698,
"learning_rate": 1.2e-05,
"loss": 1.8019,
"step": 21
},
{
"epoch": 0.006360219716681122,
"grad_norm": 1.5348568469917172,
"learning_rate": 1.2571428571428572e-05,
"loss": 1.8565,
"step": 22
},
{
"epoch": 0.0066493206128939,
"grad_norm": 1.8857981149629766,
"learning_rate": 1.3142857142857145e-05,
"loss": 1.7292,
"step": 23
},
{
"epoch": 0.006938421509106678,
"grad_norm": 1.4949439821061543,
"learning_rate": 1.3714285714285716e-05,
"loss": 1.7357,
"step": 24
},
{
"epoch": 0.007227522405319456,
"grad_norm": 1.693165357859836,
"learning_rate": 1.4285714285714287e-05,
"loss": 1.8103,
"step": 25
},
{
"epoch": 0.007516623301532235,
"grad_norm": 1.6138034582127923,
"learning_rate": 1.4857142857142858e-05,
"loss": 1.7634,
"step": 26
},
{
"epoch": 0.007805724197745013,
"grad_norm": 1.6357966310241434,
"learning_rate": 1.542857142857143e-05,
"loss": 1.7699,
"step": 27
},
{
"epoch": 0.008094825093957791,
"grad_norm": 1.5038647382371504,
"learning_rate": 1.6000000000000003e-05,
"loss": 1.7824,
"step": 28
},
{
"epoch": 0.00838392599017057,
"grad_norm": 1.4835637524801124,
"learning_rate": 1.6571428571428574e-05,
"loss": 1.685,
"step": 29
},
{
"epoch": 0.008673026886383347,
"grad_norm": 1.5071883679399785,
"learning_rate": 1.7142857142857142e-05,
"loss": 1.8,
"step": 30
},
{
"epoch": 0.008962127782596125,
"grad_norm": 1.7942262887809508,
"learning_rate": 1.7714285714285717e-05,
"loss": 1.8129,
"step": 31
},
{
"epoch": 0.009251228678808905,
"grad_norm": 1.6582976562001455,
"learning_rate": 1.8285714285714288e-05,
"loss": 1.8308,
"step": 32
},
{
"epoch": 0.009540329575021683,
"grad_norm": 1.5337861742601142,
"learning_rate": 1.885714285714286e-05,
"loss": 1.7456,
"step": 33
},
{
"epoch": 0.009829430471234461,
"grad_norm": 1.8002402808530826,
"learning_rate": 1.942857142857143e-05,
"loss": 1.9268,
"step": 34
},
{
"epoch": 0.01011853136744724,
"grad_norm": 1.7514076269246104,
"learning_rate": 2e-05,
"loss": 1.652,
"step": 35
},
{
"epoch": 0.010407632263660017,
"grad_norm": 1.8491103433261384,
"learning_rate": 1.9999995790774362e-05,
"loss": 1.8142,
"step": 36
},
{
"epoch": 0.010696733159872795,
"grad_norm": 1.8365212131962536,
"learning_rate": 1.9999983163100983e-05,
"loss": 1.7107,
"step": 37
},
{
"epoch": 0.010985834056085574,
"grad_norm": 1.5873275727358707,
"learning_rate": 1.9999962116990498e-05,
"loss": 1.6696,
"step": 38
},
{
"epoch": 0.011274934952298352,
"grad_norm": 1.7256221512542549,
"learning_rate": 1.999993265246062e-05,
"loss": 1.6901,
"step": 39
},
{
"epoch": 0.01156403584851113,
"grad_norm": 1.8002146086893285,
"learning_rate": 1.9999894769536163e-05,
"loss": 1.8886,
"step": 40
},
{
"epoch": 0.01185313674472391,
"grad_norm": 1.8367868066313766,
"learning_rate": 1.999984846824901e-05,
"loss": 1.7876,
"step": 41
},
{
"epoch": 0.012142237640936688,
"grad_norm": 1.5841989203084583,
"learning_rate": 1.999979374863814e-05,
"loss": 1.9172,
"step": 42
},
{
"epoch": 0.012431338537149466,
"grad_norm": 1.5522062286750515,
"learning_rate": 1.9999730610749623e-05,
"loss": 1.8207,
"step": 43
},
{
"epoch": 0.012720439433362244,
"grad_norm": 1.6373562793041219,
"learning_rate": 1.9999659054636612e-05,
"loss": 1.6358,
"step": 44
},
{
"epoch": 0.013009540329575022,
"grad_norm": 1.4771807978853968,
"learning_rate": 1.9999579080359337e-05,
"loss": 1.6684,
"step": 45
},
{
"epoch": 0.0132986412257878,
"grad_norm": 1.6255295191073982,
"learning_rate": 1.9999490687985134e-05,
"loss": 1.7113,
"step": 46
},
{
"epoch": 0.013587742122000578,
"grad_norm": 1.4924171597100626,
"learning_rate": 1.999939387758841e-05,
"loss": 1.7272,
"step": 47
},
{
"epoch": 0.013876843018213356,
"grad_norm": 1.536593102227256,
"learning_rate": 1.9999288649250667e-05,
"loss": 1.7532,
"step": 48
},
{
"epoch": 0.014165943914426134,
"grad_norm": 1.510648288143349,
"learning_rate": 1.9999175003060487e-05,
"loss": 1.8358,
"step": 49
},
{
"epoch": 0.014455044810638912,
"grad_norm": 1.4755302580340564,
"learning_rate": 1.9999052939113552e-05,
"loss": 1.8305,
"step": 50
},
{
"epoch": 0.014744145706851692,
"grad_norm": 1.659035076331237,
"learning_rate": 1.9998922457512608e-05,
"loss": 1.7916,
"step": 51
},
{
"epoch": 0.01503324660306447,
"grad_norm": 1.4194234922554345,
"learning_rate": 1.9998783558367506e-05,
"loss": 1.6693,
"step": 52
},
{
"epoch": 0.015322347499277248,
"grad_norm": 1.5709438854610118,
"learning_rate": 1.9998636241795184e-05,
"loss": 1.7873,
"step": 53
},
{
"epoch": 0.015611448395490026,
"grad_norm": 1.520579238616479,
"learning_rate": 1.999848050791965e-05,
"loss": 1.6949,
"step": 54
},
{
"epoch": 0.015900549291702804,
"grad_norm": 1.6871179175362505,
"learning_rate": 1.9998316356872015e-05,
"loss": 1.7697,
"step": 55
},
{
"epoch": 0.016189650187915582,
"grad_norm": 1.3949302668328034,
"learning_rate": 1.9998143788790462e-05,
"loss": 1.6176,
"step": 56
},
{
"epoch": 0.01647875108412836,
"grad_norm": 1.76393196989828,
"learning_rate": 1.9997962803820274e-05,
"loss": 1.7379,
"step": 57
},
{
"epoch": 0.01676785198034114,
"grad_norm": 2.3952825919806724,
"learning_rate": 1.9997773402113804e-05,
"loss": 1.8324,
"step": 58
},
{
"epoch": 0.017056952876553916,
"grad_norm": 1.5638827679672842,
"learning_rate": 1.9997575583830508e-05,
"loss": 1.783,
"step": 59
},
{
"epoch": 0.017346053772766695,
"grad_norm": 1.5321008746402438,
"learning_rate": 1.999736934913691e-05,
"loss": 1.7136,
"step": 60
},
{
"epoch": 0.017635154668979473,
"grad_norm": 1.46754278560031,
"learning_rate": 1.9997154698206636e-05,
"loss": 1.7567,
"step": 61
},
{
"epoch": 0.01792425556519225,
"grad_norm": 1.5842416386679679,
"learning_rate": 1.999693163122038e-05,
"loss": 1.757,
"step": 62
},
{
"epoch": 0.01821335646140503,
"grad_norm": 1.5267999996929678,
"learning_rate": 1.9996700148365936e-05,
"loss": 1.768,
"step": 63
},
{
"epoch": 0.01850245735761781,
"grad_norm": 1.6371904649667028,
"learning_rate": 1.9996460249838176e-05,
"loss": 1.8574,
"step": 64
},
{
"epoch": 0.01879155825383059,
"grad_norm": 1.4572549097707117,
"learning_rate": 1.9996211935839053e-05,
"loss": 1.8134,
"step": 65
},
{
"epoch": 0.019080659150043366,
"grad_norm": 1.6026953718941006,
"learning_rate": 1.9995955206577617e-05,
"loss": 1.7718,
"step": 66
},
{
"epoch": 0.019369760046256145,
"grad_norm": 1.4731386293380266,
"learning_rate": 1.9995690062269985e-05,
"loss": 1.7598,
"step": 67
},
{
"epoch": 0.019658860942468923,
"grad_norm": 1.4280673736098164,
"learning_rate": 1.9995416503139372e-05,
"loss": 1.7469,
"step": 68
},
{
"epoch": 0.0199479618386817,
"grad_norm": 1.4976095361519832,
"learning_rate": 1.9995134529416077e-05,
"loss": 1.7258,
"step": 69
},
{
"epoch": 0.02023706273489448,
"grad_norm": 1.3734046074406898,
"learning_rate": 1.9994844141337467e-05,
"loss": 1.6921,
"step": 70
},
{
"epoch": 0.020526163631107257,
"grad_norm": 1.5285691045400083,
"learning_rate": 1.9994545339148017e-05,
"loss": 1.755,
"step": 71
},
{
"epoch": 0.020815264527320035,
"grad_norm": 1.7237565941986015,
"learning_rate": 1.9994238123099264e-05,
"loss": 1.7254,
"step": 72
},
{
"epoch": 0.021104365423532813,
"grad_norm": 1.4511698572347005,
"learning_rate": 1.9993922493449833e-05,
"loss": 1.7339,
"step": 73
},
{
"epoch": 0.02139346631974559,
"grad_norm": 1.3034115810336904,
"learning_rate": 1.9993598450465446e-05,
"loss": 1.8015,
"step": 74
},
{
"epoch": 0.02168256721595837,
"grad_norm": 1.3220719840165054,
"learning_rate": 1.9993265994418887e-05,
"loss": 1.7794,
"step": 75
},
{
"epoch": 0.021971668112171147,
"grad_norm": 1.3514726546077938,
"learning_rate": 1.999292512559004e-05,
"loss": 1.781,
"step": 76
},
{
"epoch": 0.022260769008383925,
"grad_norm": 1.4854967526025784,
"learning_rate": 1.9992575844265857e-05,
"loss": 1.8188,
"step": 77
},
{
"epoch": 0.022549869904596703,
"grad_norm": 1.42527411671424,
"learning_rate": 1.9992218150740386e-05,
"loss": 1.7858,
"step": 78
},
{
"epoch": 0.02283897080080948,
"grad_norm": 1.3690019880914763,
"learning_rate": 1.9991852045314744e-05,
"loss": 1.69,
"step": 79
},
{
"epoch": 0.02312807169702226,
"grad_norm": 1.4796987548843166,
"learning_rate": 1.9991477528297136e-05,
"loss": 1.7501,
"step": 80
},
{
"epoch": 0.023417172593235037,
"grad_norm": 1.4237249543510602,
"learning_rate": 1.999109460000285e-05,
"loss": 1.7634,
"step": 81
},
{
"epoch": 0.02370627348944782,
"grad_norm": 1.3087042024971207,
"learning_rate": 1.9990703260754248e-05,
"loss": 1.8106,
"step": 82
},
{
"epoch": 0.023995374385660597,
"grad_norm": 1.4158584649264259,
"learning_rate": 1.999030351088078e-05,
"loss": 1.7503,
"step": 83
},
{
"epoch": 0.024284475281873375,
"grad_norm": 1.4266437173978106,
"learning_rate": 1.9989895350718972e-05,
"loss": 1.6435,
"step": 84
},
{
"epoch": 0.024573576178086153,
"grad_norm": 1.519614177980851,
"learning_rate": 1.9989478780612434e-05,
"loss": 1.7155,
"step": 85
},
{
"epoch": 0.02486267707429893,
"grad_norm": 1.4829974498829237,
"learning_rate": 1.9989053800911854e-05,
"loss": 1.7406,
"step": 86
},
{
"epoch": 0.02515177797051171,
"grad_norm": 1.5340468112410737,
"learning_rate": 1.9988620411974994e-05,
"loss": 1.6487,
"step": 87
},
{
"epoch": 0.025440878866724487,
"grad_norm": 1.3928928308705317,
"learning_rate": 1.9988178614166706e-05,
"loss": 1.7535,
"step": 88
},
{
"epoch": 0.025729979762937265,
"grad_norm": 1.4648291742355268,
"learning_rate": 1.9987728407858912e-05,
"loss": 1.6977,
"step": 89
},
{
"epoch": 0.026019080659150044,
"grad_norm": 1.5330205874026324,
"learning_rate": 1.9987269793430618e-05,
"loss": 1.8134,
"step": 90
},
{
"epoch": 0.02630818155536282,
"grad_norm": 1.4322119486638492,
"learning_rate": 1.9986802771267902e-05,
"loss": 1.7493,
"step": 91
},
{
"epoch": 0.0265972824515756,
"grad_norm": 1.3741952459535092,
"learning_rate": 1.9986327341763933e-05,
"loss": 1.7498,
"step": 92
},
{
"epoch": 0.026886383347788378,
"grad_norm": 1.4439173869710846,
"learning_rate": 1.998584350531894e-05,
"loss": 1.8364,
"step": 93
},
{
"epoch": 0.027175484244001156,
"grad_norm": 1.5238329839004898,
"learning_rate": 1.9985351262340242e-05,
"loss": 1.7025,
"step": 94
},
{
"epoch": 0.027464585140213934,
"grad_norm": 1.4211403871339836,
"learning_rate": 1.998485061324223e-05,
"loss": 1.8061,
"step": 95
},
{
"epoch": 0.027753686036426712,
"grad_norm": 1.4891220326396055,
"learning_rate": 1.998434155844638e-05,
"loss": 1.7756,
"step": 96
},
{
"epoch": 0.02804278693263949,
"grad_norm": 1.4672105325874756,
"learning_rate": 1.9983824098381226e-05,
"loss": 1.7642,
"step": 97
},
{
"epoch": 0.028331887828852268,
"grad_norm": 1.4102584299249543,
"learning_rate": 1.9983298233482396e-05,
"loss": 1.7302,
"step": 98
},
{
"epoch": 0.028620988725065046,
"grad_norm": 1.4570496244711266,
"learning_rate": 1.9982763964192586e-05,
"loss": 1.7299,
"step": 99
},
{
"epoch": 0.028910089621277824,
"grad_norm": 1.4536709099623053,
"learning_rate": 1.9982221290961568e-05,
"loss": 1.7647,
"step": 100
},
{
"epoch": 0.029199190517490606,
"grad_norm": 1.427202594686143,
"learning_rate": 1.9981670214246186e-05,
"loss": 1.7361,
"step": 101
},
{
"epoch": 0.029488291413703384,
"grad_norm": 1.4166737515686485,
"learning_rate": 1.9981110734510368e-05,
"loss": 1.7107,
"step": 102
},
{
"epoch": 0.029777392309916162,
"grad_norm": 1.4291514974937183,
"learning_rate": 1.99805428522251e-05,
"loss": 1.7162,
"step": 103
},
{
"epoch": 0.03006649320612894,
"grad_norm": 1.3691457808724619,
"learning_rate": 1.997996656786846e-05,
"loss": 1.8823,
"step": 104
},
{
"epoch": 0.030355594102341718,
"grad_norm": 1.38193263647762,
"learning_rate": 1.9979381881925587e-05,
"loss": 1.6658,
"step": 105
},
{
"epoch": 0.030644694998554496,
"grad_norm": 1.4331634763444776,
"learning_rate": 1.997878879488869e-05,
"loss": 1.7695,
"step": 106
},
{
"epoch": 0.030933795894767274,
"grad_norm": 1.4389434938173666,
"learning_rate": 1.9978187307257064e-05,
"loss": 1.7356,
"step": 107
},
{
"epoch": 0.031222896790980052,
"grad_norm": 1.4057807430118254,
"learning_rate": 1.9977577419537066e-05,
"loss": 1.7191,
"step": 108
},
{
"epoch": 0.03151199768719283,
"grad_norm": 1.4476225746298625,
"learning_rate": 1.9976959132242128e-05,
"loss": 1.8028,
"step": 109
},
{
"epoch": 0.03180109858340561,
"grad_norm": 1.3595624610249801,
"learning_rate": 1.997633244589275e-05,
"loss": 1.708,
"step": 110
},
{
"epoch": 0.032090199479618386,
"grad_norm": 1.5622440441344578,
"learning_rate": 1.997569736101651e-05,
"loss": 1.7138,
"step": 111
},
{
"epoch": 0.032379300375831165,
"grad_norm": 1.4451837327532462,
"learning_rate": 1.9975053878148045e-05,
"loss": 1.745,
"step": 112
},
{
"epoch": 0.03266840127204394,
"grad_norm": 1.3461945134165731,
"learning_rate": 1.9974401997829063e-05,
"loss": 1.792,
"step": 113
},
{
"epoch": 0.03295750216825672,
"grad_norm": 1.4916956994354813,
"learning_rate": 1.997374172060836e-05,
"loss": 1.7312,
"step": 114
},
{
"epoch": 0.0332466030644695,
"grad_norm": 1.3473635350533286,
"learning_rate": 1.997307304704178e-05,
"loss": 1.7257,
"step": 115
},
{
"epoch": 0.03353570396068228,
"grad_norm": 1.5308835567804706,
"learning_rate": 1.9972395977692243e-05,
"loss": 1.8833,
"step": 116
},
{
"epoch": 0.033824804856895055,
"grad_norm": 1.4933799858376915,
"learning_rate": 1.9971710513129735e-05,
"loss": 1.8031,
"step": 117
},
{
"epoch": 0.03411390575310783,
"grad_norm": 1.4666676384345598,
"learning_rate": 1.9971016653931315e-05,
"loss": 1.699,
"step": 118
},
{
"epoch": 0.03440300664932061,
"grad_norm": 1.376396676563015,
"learning_rate": 1.9970314400681098e-05,
"loss": 1.7561,
"step": 119
},
{
"epoch": 0.03469210754553339,
"grad_norm": 1.4309738039308064,
"learning_rate": 1.9969603753970285e-05,
"loss": 1.7812,
"step": 120
},
{
"epoch": 0.03498120844174617,
"grad_norm": 1.4395837496367765,
"learning_rate": 1.9968884714397116e-05,
"loss": 1.6863,
"step": 121
},
{
"epoch": 0.035270309337958945,
"grad_norm": 1.3440476522906626,
"learning_rate": 1.9968157282566917e-05,
"loss": 1.7026,
"step": 122
},
{
"epoch": 0.03555941023417172,
"grad_norm": 1.3253598540978417,
"learning_rate": 1.9967421459092076e-05,
"loss": 1.6773,
"step": 123
},
{
"epoch": 0.0358485111303845,
"grad_norm": 1.4654991309929606,
"learning_rate": 1.996667724459204e-05,
"loss": 1.6995,
"step": 124
},
{
"epoch": 0.03613761202659728,
"grad_norm": 1.3695366677193352,
"learning_rate": 1.9965924639693323e-05,
"loss": 1.8028,
"step": 125
},
{
"epoch": 0.03642671292281006,
"grad_norm": 1.5996594030035125,
"learning_rate": 1.9965163645029496e-05,
"loss": 1.6509,
"step": 126
},
{
"epoch": 0.036715813819022836,
"grad_norm": 1.3638136094831714,
"learning_rate": 1.9964394261241205e-05,
"loss": 1.7974,
"step": 127
},
{
"epoch": 0.03700491471523562,
"grad_norm": 1.5950089807963124,
"learning_rate": 1.9963616488976156e-05,
"loss": 1.7817,
"step": 128
},
{
"epoch": 0.0372940156114484,
"grad_norm": 1.3874690940551964,
"learning_rate": 1.9962830328889104e-05,
"loss": 1.7727,
"step": 129
},
{
"epoch": 0.03758311650766118,
"grad_norm": 1.428332455733304,
"learning_rate": 1.9962035781641877e-05,
"loss": 1.8162,
"step": 130
},
{
"epoch": 0.037872217403873955,
"grad_norm": 1.413377966510162,
"learning_rate": 1.996123284790336e-05,
"loss": 1.6724,
"step": 131
},
{
"epoch": 0.03816131830008673,
"grad_norm": 1.4114781128468488,
"learning_rate": 1.9960421528349503e-05,
"loss": 1.6688,
"step": 132
},
{
"epoch": 0.03845041919629951,
"grad_norm": 1.4667937866255885,
"learning_rate": 1.9959601823663305e-05,
"loss": 1.7404,
"step": 133
},
{
"epoch": 0.03873952009251229,
"grad_norm": 1.3864312896377629,
"learning_rate": 1.9958773734534834e-05,
"loss": 1.6797,
"step": 134
},
{
"epoch": 0.03902862098872507,
"grad_norm": 1.5863275696219379,
"learning_rate": 1.9957937261661215e-05,
"loss": 1.7093,
"step": 135
},
{
"epoch": 0.039317721884937845,
"grad_norm": 1.3943132521368478,
"learning_rate": 1.9957092405746628e-05,
"loss": 1.7466,
"step": 136
},
{
"epoch": 0.03960682278115062,
"grad_norm": 1.4418545458171168,
"learning_rate": 1.9956239167502304e-05,
"loss": 1.8363,
"step": 137
},
{
"epoch": 0.0398959236773634,
"grad_norm": 1.6597022379228474,
"learning_rate": 1.9955377547646546e-05,
"loss": 1.7726,
"step": 138
},
{
"epoch": 0.04018502457357618,
"grad_norm": 1.374590779273713,
"learning_rate": 1.9954507546904697e-05,
"loss": 1.7559,
"step": 139
},
{
"epoch": 0.04047412546978896,
"grad_norm": 1.2866499565497842,
"learning_rate": 1.995362916600917e-05,
"loss": 1.7478,
"step": 140
},
{
"epoch": 0.040763226366001735,
"grad_norm": 1.3549533524362687,
"learning_rate": 1.9952742405699425e-05,
"loss": 1.8389,
"step": 141
},
{
"epoch": 0.041052327262214514,
"grad_norm": 1.3512745611701633,
"learning_rate": 1.995184726672197e-05,
"loss": 1.7975,
"step": 142
},
{
"epoch": 0.04134142815842729,
"grad_norm": 1.3460191452855619,
"learning_rate": 1.995094374983038e-05,
"loss": 1.7245,
"step": 143
},
{
"epoch": 0.04163052905464007,
"grad_norm": 1.3305167255129993,
"learning_rate": 1.9950031855785276e-05,
"loss": 1.6294,
"step": 144
},
{
"epoch": 0.04191962995085285,
"grad_norm": 1.34284871707125,
"learning_rate": 1.9949111585354328e-05,
"loss": 1.7124,
"step": 145
},
{
"epoch": 0.042208730847065626,
"grad_norm": 1.3772925926259127,
"learning_rate": 1.9948182939312258e-05,
"loss": 1.7095,
"step": 146
},
{
"epoch": 0.042497831743278404,
"grad_norm": 1.384298315882825,
"learning_rate": 1.994724591844085e-05,
"loss": 1.8031,
"step": 147
},
{
"epoch": 0.04278693263949118,
"grad_norm": 1.3740208297445895,
"learning_rate": 1.994630052352893e-05,
"loss": 1.7814,
"step": 148
},
{
"epoch": 0.04307603353570396,
"grad_norm": 1.3187990568089156,
"learning_rate": 1.9945346755372367e-05,
"loss": 1.7486,
"step": 149
},
{
"epoch": 0.04336513443191674,
"grad_norm": 1.3347832866557865,
"learning_rate": 1.9944384614774095e-05,
"loss": 1.7225,
"step": 150
},
{
"epoch": 0.043654235328129516,
"grad_norm": 1.3850913268385951,
"learning_rate": 1.9943414102544083e-05,
"loss": 1.7608,
"step": 151
},
{
"epoch": 0.043943336224342294,
"grad_norm": 1.3180540802523992,
"learning_rate": 1.994243521949935e-05,
"loss": 1.7534,
"step": 152
},
{
"epoch": 0.04423243712055507,
"grad_norm": 1.3989923351107776,
"learning_rate": 1.9941447966463966e-05,
"loss": 1.7734,
"step": 153
},
{
"epoch": 0.04452153801676785,
"grad_norm": 1.2709853505702844,
"learning_rate": 1.9940452344269045e-05,
"loss": 1.7463,
"step": 154
},
{
"epoch": 0.04481063891298063,
"grad_norm": 1.3117490865184025,
"learning_rate": 1.9939448353752745e-05,
"loss": 1.7484,
"step": 155
},
{
"epoch": 0.045099739809193407,
"grad_norm": 1.3068520843456393,
"learning_rate": 1.9938435995760275e-05,
"loss": 1.7793,
"step": 156
},
{
"epoch": 0.045388840705406185,
"grad_norm": 1.2903723753260385,
"learning_rate": 1.993741527114388e-05,
"loss": 1.7654,
"step": 157
},
{
"epoch": 0.04567794160161896,
"grad_norm": 1.3281351334689,
"learning_rate": 1.993638618076285e-05,
"loss": 1.7574,
"step": 158
},
{
"epoch": 0.04596704249783174,
"grad_norm": 1.331538167128145,
"learning_rate": 1.9935348725483526e-05,
"loss": 1.7431,
"step": 159
},
{
"epoch": 0.04625614339404452,
"grad_norm": 1.4516409761685403,
"learning_rate": 1.9934302906179285e-05,
"loss": 1.7334,
"step": 160
},
{
"epoch": 0.0465452442902573,
"grad_norm": 1.2977322772074695,
"learning_rate": 1.9933248723730536e-05,
"loss": 1.6306,
"step": 161
},
{
"epoch": 0.046834345186470075,
"grad_norm": 1.357062910781076,
"learning_rate": 1.9932186179024744e-05,
"loss": 1.668,
"step": 162
},
{
"epoch": 0.04712344608268285,
"grad_norm": 1.3803460903193827,
"learning_rate": 1.9931115272956405e-05,
"loss": 1.7275,
"step": 163
},
{
"epoch": 0.04741254697889564,
"grad_norm": 1.315843862445011,
"learning_rate": 1.9930036006427058e-05,
"loss": 1.6444,
"step": 164
},
{
"epoch": 0.047701647875108416,
"grad_norm": 1.4617283691426304,
"learning_rate": 1.9928948380345274e-05,
"loss": 1.8109,
"step": 165
},
{
"epoch": 0.047990748771321194,
"grad_norm": 1.38383885394047,
"learning_rate": 1.9927852395626672e-05,
"loss": 1.7686,
"step": 166
},
{
"epoch": 0.04827984966753397,
"grad_norm": 1.458724964408903,
"learning_rate": 1.9926748053193898e-05,
"loss": 1.6962,
"step": 167
},
{
"epoch": 0.04856895056374675,
"grad_norm": 1.3411049270177544,
"learning_rate": 1.9925635353976634e-05,
"loss": 1.6548,
"step": 168
},
{
"epoch": 0.04885805145995953,
"grad_norm": 1.4316515117975288,
"learning_rate": 1.992451429891161e-05,
"loss": 1.7439,
"step": 169
},
{
"epoch": 0.049147152356172306,
"grad_norm": 1.3310049050553332,
"learning_rate": 1.9923384888942568e-05,
"loss": 1.7042,
"step": 170
},
{
"epoch": 0.049436253252385085,
"grad_norm": 1.411118381246596,
"learning_rate": 1.9922247125020307e-05,
"loss": 1.7172,
"step": 171
},
{
"epoch": 0.04972535414859786,
"grad_norm": 1.2882521449870807,
"learning_rate": 1.992110100810264e-05,
"loss": 1.7679,
"step": 172
},
{
"epoch": 0.05001445504481064,
"grad_norm": 1.3412263735063734,
"learning_rate": 1.991994653915443e-05,
"loss": 1.6375,
"step": 173
},
{
"epoch": 0.05030355594102342,
"grad_norm": 1.2875141274832036,
"learning_rate": 1.991878371914755e-05,
"loss": 1.6816,
"step": 174
},
{
"epoch": 0.0505926568372362,
"grad_norm": 1.2707786543811448,
"learning_rate": 1.991761254906092e-05,
"loss": 1.7382,
"step": 175
},
{
"epoch": 0.050881757733448975,
"grad_norm": 1.2815480979153002,
"learning_rate": 1.9916433029880485e-05,
"loss": 1.7519,
"step": 176
},
{
"epoch": 0.05117085862966175,
"grad_norm": 1.358987922639111,
"learning_rate": 1.991524516259921e-05,
"loss": 1.7895,
"step": 177
},
{
"epoch": 0.05145995952587453,
"grad_norm": 1.2688791830287245,
"learning_rate": 1.9914048948217105e-05,
"loss": 1.776,
"step": 178
},
{
"epoch": 0.05174906042208731,
"grad_norm": 1.2813810920416182,
"learning_rate": 1.9912844387741194e-05,
"loss": 1.7085,
"step": 179
},
{
"epoch": 0.05203816131830009,
"grad_norm": 1.2916088597948596,
"learning_rate": 1.991163148218553e-05,
"loss": 1.7124,
"step": 180
},
{
"epoch": 0.052327262214512865,
"grad_norm": 1.310527847083729,
"learning_rate": 1.9910410232571187e-05,
"loss": 1.6333,
"step": 181
},
{
"epoch": 0.05261636311072564,
"grad_norm": 1.3292526744086122,
"learning_rate": 1.9909180639926274e-05,
"loss": 1.663,
"step": 182
},
{
"epoch": 0.05290546400693842,
"grad_norm": 1.4107465041661023,
"learning_rate": 1.990794270528592e-05,
"loss": 1.6939,
"step": 183
},
{
"epoch": 0.0531945649031512,
"grad_norm": 1.5697948856294517,
"learning_rate": 1.9906696429692265e-05,
"loss": 1.79,
"step": 184
},
{
"epoch": 0.05348366579936398,
"grad_norm": 1.359687672105227,
"learning_rate": 1.9905441814194482e-05,
"loss": 1.6024,
"step": 185
},
{
"epoch": 0.053772766695576756,
"grad_norm": 1.435220148856358,
"learning_rate": 1.990417885984877e-05,
"loss": 1.6539,
"step": 186
},
{
"epoch": 0.054061867591789534,
"grad_norm": 1.4702625299572514,
"learning_rate": 1.990290756771834e-05,
"loss": 1.7306,
"step": 187
},
{
"epoch": 0.05435096848800231,
"grad_norm": 1.5247866204720824,
"learning_rate": 1.9901627938873416e-05,
"loss": 1.7609,
"step": 188
},
{
"epoch": 0.05464006938421509,
"grad_norm": 1.3031717794857007,
"learning_rate": 1.9900339974391252e-05,
"loss": 1.7389,
"step": 189
},
{
"epoch": 0.05492917028042787,
"grad_norm": 1.3511696476383066,
"learning_rate": 1.9899043675356114e-05,
"loss": 1.7297,
"step": 190
},
{
"epoch": 0.055218271176640646,
"grad_norm": 1.4509301331653846,
"learning_rate": 1.9897739042859285e-05,
"loss": 1.737,
"step": 191
},
{
"epoch": 0.055507372072853424,
"grad_norm": 1.3380070550795329,
"learning_rate": 1.9896426077999062e-05,
"loss": 1.6662,
"step": 192
},
{
"epoch": 0.0557964729690662,
"grad_norm": 1.3684181423939343,
"learning_rate": 1.989510478188076e-05,
"loss": 1.7297,
"step": 193
},
{
"epoch": 0.05608557386527898,
"grad_norm": 1.4948293977771248,
"learning_rate": 1.9893775155616704e-05,
"loss": 1.7733,
"step": 194
},
{
"epoch": 0.05637467476149176,
"grad_norm": 6.056711246922273,
"learning_rate": 1.989243720032624e-05,
"loss": 1.8127,
"step": 195
},
{
"epoch": 0.056663775657704536,
"grad_norm": 1.4031162820625223,
"learning_rate": 1.989109091713571e-05,
"loss": 1.7476,
"step": 196
},
{
"epoch": 0.056952876553917314,
"grad_norm": 1.388743000455743,
"learning_rate": 1.988973630717848e-05,
"loss": 1.7256,
"step": 197
},
{
"epoch": 0.05724197745013009,
"grad_norm": 1.3228310217145096,
"learning_rate": 1.9888373371594923e-05,
"loss": 1.7153,
"step": 198
},
{
"epoch": 0.05753107834634287,
"grad_norm": 1.349390586328493,
"learning_rate": 1.9887002111532413e-05,
"loss": 1.7913,
"step": 199
},
{
"epoch": 0.05782017924255565,
"grad_norm": 1.3218835310967434,
"learning_rate": 1.9885622528145346e-05,
"loss": 1.893,
"step": 200
},
{
"epoch": 0.058109280138768434,
"grad_norm": 1.3066393320266039,
"learning_rate": 1.9884234622595117e-05,
"loss": 1.7445,
"step": 201
},
{
"epoch": 0.05839838103498121,
"grad_norm": 1.3431846081510803,
"learning_rate": 1.988283839605013e-05,
"loss": 1.764,
"step": 202
},
{
"epoch": 0.05868748193119399,
"grad_norm": 1.2716836951252142,
"learning_rate": 1.988143384968578e-05,
"loss": 1.689,
"step": 203
},
{
"epoch": 0.05897658282740677,
"grad_norm": 1.3463201818447268,
"learning_rate": 1.9880020984684486e-05,
"loss": 1.6615,
"step": 204
},
{
"epoch": 0.059265683723619546,
"grad_norm": 1.3067391536442128,
"learning_rate": 1.9878599802235662e-05,
"loss": 1.683,
"step": 205
},
{
"epoch": 0.059554784619832324,
"grad_norm": 1.407390031033922,
"learning_rate": 1.987717030353572e-05,
"loss": 1.7617,
"step": 206
},
{
"epoch": 0.0598438855160451,
"grad_norm": 1.2785798130045682,
"learning_rate": 1.9875732489788082e-05,
"loss": 1.6828,
"step": 207
},
{
"epoch": 0.06013298641225788,
"grad_norm": 1.581333677513783,
"learning_rate": 1.9874286362203162e-05,
"loss": 1.776,
"step": 208
},
{
"epoch": 0.06042208730847066,
"grad_norm": 1.3170624490173282,
"learning_rate": 1.987283192199837e-05,
"loss": 1.7548,
"step": 209
},
{
"epoch": 0.060711188204683436,
"grad_norm": 1.6773819921976063,
"learning_rate": 1.9871369170398126e-05,
"loss": 1.7935,
"step": 210
},
{
"epoch": 0.061000289100896214,
"grad_norm": 1.4063651245701678,
"learning_rate": 1.9869898108633834e-05,
"loss": 1.7806,
"step": 211
},
{
"epoch": 0.06128938999710899,
"grad_norm": 1.3241937165367996,
"learning_rate": 1.986841873794391e-05,
"loss": 1.7262,
"step": 212
},
{
"epoch": 0.06157849089332177,
"grad_norm": 1.49389208627189,
"learning_rate": 1.9866931059573748e-05,
"loss": 1.7103,
"step": 213
},
{
"epoch": 0.06186759178953455,
"grad_norm": 1.219442276366396,
"learning_rate": 1.9865435074775744e-05,
"loss": 1.6194,
"step": 214
},
{
"epoch": 0.062156692685747326,
"grad_norm": 1.3453821292169061,
"learning_rate": 1.9863930784809284e-05,
"loss": 1.8086,
"step": 215
},
{
"epoch": 0.062445793581960105,
"grad_norm": 1.2961743317843437,
"learning_rate": 1.986241819094075e-05,
"loss": 1.6352,
"step": 216
},
{
"epoch": 0.06273489447817288,
"grad_norm": 1.3729904036841547,
"learning_rate": 1.986089729444351e-05,
"loss": 1.7041,
"step": 217
},
{
"epoch": 0.06302399537438566,
"grad_norm": 1.3443809779979903,
"learning_rate": 1.9859368096597925e-05,
"loss": 1.7332,
"step": 218
},
{
"epoch": 0.06331309627059845,
"grad_norm": 1.3367070791840931,
"learning_rate": 1.9857830598691337e-05,
"loss": 1.639,
"step": 219
},
{
"epoch": 0.06360219716681122,
"grad_norm": 1.3799415387024292,
"learning_rate": 1.9856284802018087e-05,
"loss": 1.7675,
"step": 220
},
{
"epoch": 0.063891298063024,
"grad_norm": 1.3464389753341524,
"learning_rate": 1.9854730707879502e-05,
"loss": 1.6702,
"step": 221
},
{
"epoch": 0.06418039895923677,
"grad_norm": 1.3867842033565003,
"learning_rate": 1.9853168317583874e-05,
"loss": 1.7216,
"step": 222
},
{
"epoch": 0.06446949985544956,
"grad_norm": 1.2892516322419894,
"learning_rate": 1.9851597632446508e-05,
"loss": 1.6755,
"step": 223
},
{
"epoch": 0.06475860075166233,
"grad_norm": 1.4307425363074875,
"learning_rate": 1.9850018653789665e-05,
"loss": 1.7618,
"step": 224
},
{
"epoch": 0.06504770164787511,
"grad_norm": 1.288672937098248,
"learning_rate": 1.984843138294261e-05,
"loss": 1.756,
"step": 225
},
{
"epoch": 0.06533680254408789,
"grad_norm": 1.3540206108809039,
"learning_rate": 1.9846835821241573e-05,
"loss": 1.7373,
"step": 226
},
{
"epoch": 0.06562590344030067,
"grad_norm": 1.3108759943656667,
"learning_rate": 1.9845231970029774e-05,
"loss": 1.6429,
"step": 227
},
{
"epoch": 0.06591500433651344,
"grad_norm": 1.3549691917787245,
"learning_rate": 1.9843619830657404e-05,
"loss": 1.6692,
"step": 228
},
{
"epoch": 0.06620410523272623,
"grad_norm": 1.350566839669386,
"learning_rate": 1.984199940448164e-05,
"loss": 1.6499,
"step": 229
},
{
"epoch": 0.066493206128939,
"grad_norm": 1.3992430914185707,
"learning_rate": 1.9840370692866624e-05,
"loss": 1.7279,
"step": 230
},
{
"epoch": 0.06678230702515178,
"grad_norm": 1.336273448437935,
"learning_rate": 1.9838733697183482e-05,
"loss": 1.7378,
"step": 231
},
{
"epoch": 0.06707140792136455,
"grad_norm": 1.3714334108906678,
"learning_rate": 1.9837088418810312e-05,
"loss": 1.6845,
"step": 232
},
{
"epoch": 0.06736050881757734,
"grad_norm": 1.331192814515942,
"learning_rate": 1.983543485913218e-05,
"loss": 1.7605,
"step": 233
},
{
"epoch": 0.06764960971379011,
"grad_norm": 1.3590047127242209,
"learning_rate": 1.983377301954113e-05,
"loss": 1.7027,
"step": 234
},
{
"epoch": 0.0679387106100029,
"grad_norm": 1.2628103031697302,
"learning_rate": 1.983210290143617e-05,
"loss": 1.6682,
"step": 235
},
{
"epoch": 0.06822781150621567,
"grad_norm": 1.31345700201241,
"learning_rate": 1.9830424506223286e-05,
"loss": 1.7787,
"step": 236
},
{
"epoch": 0.06851691240242845,
"grad_norm": 1.367333629142093,
"learning_rate": 1.9828737835315422e-05,
"loss": 1.7182,
"step": 237
},
{
"epoch": 0.06880601329864122,
"grad_norm": 1.3814327986016506,
"learning_rate": 1.9827042890132498e-05,
"loss": 1.8058,
"step": 238
},
{
"epoch": 0.06909511419485401,
"grad_norm": 1.2616372891530419,
"learning_rate": 1.982533967210139e-05,
"loss": 1.7131,
"step": 239
},
{
"epoch": 0.06938421509106678,
"grad_norm": 1.3695087479555836,
"learning_rate": 1.982362818265595e-05,
"loss": 1.7163,
"step": 240
},
{
"epoch": 0.06967331598727956,
"grad_norm": 1.3557868876434347,
"learning_rate": 1.9821908423236983e-05,
"loss": 1.7421,
"step": 241
},
{
"epoch": 0.06996241688349233,
"grad_norm": 1.392442344450402,
"learning_rate": 1.982018039529226e-05,
"loss": 1.747,
"step": 242
},
{
"epoch": 0.07025151777970512,
"grad_norm": 1.4471159007355705,
"learning_rate": 1.9818444100276517e-05,
"loss": 1.6452,
"step": 243
},
{
"epoch": 0.07054061867591789,
"grad_norm": 1.323270395385573,
"learning_rate": 1.981669953965144e-05,
"loss": 1.795,
"step": 244
},
{
"epoch": 0.07082971957213068,
"grad_norm": 1.32959668728154,
"learning_rate": 1.9814946714885682e-05,
"loss": 1.8027,
"step": 245
},
{
"epoch": 0.07111882046834345,
"grad_norm": 1.3027587139671195,
"learning_rate": 1.981318562745485e-05,
"loss": 1.7326,
"step": 246
},
{
"epoch": 0.07140792136455623,
"grad_norm": 1.328453385919723,
"learning_rate": 1.9811416278841507e-05,
"loss": 1.6839,
"step": 247
},
{
"epoch": 0.071697022260769,
"grad_norm": 1.2271866822510065,
"learning_rate": 1.980963867053517e-05,
"loss": 1.6021,
"step": 248
},
{
"epoch": 0.07198612315698179,
"grad_norm": 1.2908284006992323,
"learning_rate": 1.9807852804032306e-05,
"loss": 1.6339,
"step": 249
},
{
"epoch": 0.07227522405319456,
"grad_norm": 1.373522694157661,
"learning_rate": 1.9806058680836343e-05,
"loss": 1.6902,
"step": 250
},
{
"epoch": 0.07256432494940734,
"grad_norm": 1.2634235256940958,
"learning_rate": 1.9804256302457653e-05,
"loss": 1.6517,
"step": 251
},
{
"epoch": 0.07285342584562012,
"grad_norm": 1.3278784508700443,
"learning_rate": 1.9802445670413562e-05,
"loss": 1.7972,
"step": 252
},
{
"epoch": 0.0731425267418329,
"grad_norm": 1.3552386657179452,
"learning_rate": 1.980062678622834e-05,
"loss": 1.726,
"step": 253
},
{
"epoch": 0.07343162763804567,
"grad_norm": 1.3742079147545718,
"learning_rate": 1.9798799651433204e-05,
"loss": 1.7317,
"step": 254
},
{
"epoch": 0.07372072853425846,
"grad_norm": 1.4074238143735376,
"learning_rate": 1.9796964267566322e-05,
"loss": 1.7275,
"step": 255
},
{
"epoch": 0.07400982943047124,
"grad_norm": 1.286876584367334,
"learning_rate": 1.9795120636172804e-05,
"loss": 1.6061,
"step": 256
},
{
"epoch": 0.07429893032668401,
"grad_norm": 1.3521349118231711,
"learning_rate": 1.9793268758804693e-05,
"loss": 1.7237,
"step": 257
},
{
"epoch": 0.0745880312228968,
"grad_norm": 1.2816583167182087,
"learning_rate": 1.979140863702099e-05,
"loss": 1.6783,
"step": 258
},
{
"epoch": 0.07487713211910957,
"grad_norm": 1.3634926357253074,
"learning_rate": 1.978954027238763e-05,
"loss": 1.6784,
"step": 259
},
{
"epoch": 0.07516623301532235,
"grad_norm": 1.4038229521295733,
"learning_rate": 1.9787663666477486e-05,
"loss": 1.6535,
"step": 260
},
{
"epoch": 0.07545533391153512,
"grad_norm": 1.2842218606303095,
"learning_rate": 1.9785778820870366e-05,
"loss": 1.7058,
"step": 261
},
{
"epoch": 0.07574443480774791,
"grad_norm": 1.347020075001843,
"learning_rate": 1.9783885737153022e-05,
"loss": 1.7832,
"step": 262
},
{
"epoch": 0.07603353570396068,
"grad_norm": 1.26053283819741,
"learning_rate": 1.9781984416919138e-05,
"loss": 1.644,
"step": 263
},
{
"epoch": 0.07632263660017347,
"grad_norm": 1.2495419382455348,
"learning_rate": 1.9780074861769328e-05,
"loss": 1.6807,
"step": 264
},
{
"epoch": 0.07661173749638624,
"grad_norm": 1.7139411191235945,
"learning_rate": 1.977815707331114e-05,
"loss": 1.7222,
"step": 265
},
{
"epoch": 0.07690083839259902,
"grad_norm": 1.4547863480275938,
"learning_rate": 1.9776231053159063e-05,
"loss": 1.7774,
"step": 266
},
{
"epoch": 0.07718993928881179,
"grad_norm": 1.3437347227693044,
"learning_rate": 1.97742968029345e-05,
"loss": 1.7208,
"step": 267
},
{
"epoch": 0.07747904018502458,
"grad_norm": 1.354739938343438,
"learning_rate": 1.9772354324265796e-05,
"loss": 1.6918,
"step": 268
},
{
"epoch": 0.07776814108123735,
"grad_norm": 1.2544728177655773,
"learning_rate": 1.977040361878821e-05,
"loss": 1.6351,
"step": 269
},
{
"epoch": 0.07805724197745013,
"grad_norm": 1.3417761983959458,
"learning_rate": 1.9768444688143938e-05,
"loss": 1.6564,
"step": 270
},
{
"epoch": 0.0783463428736629,
"grad_norm": 1.3422116229415715,
"learning_rate": 1.9766477533982094e-05,
"loss": 1.8033,
"step": 271
},
{
"epoch": 0.07863544376987569,
"grad_norm": 1.2706457122008528,
"learning_rate": 1.976450215795872e-05,
"loss": 1.6505,
"step": 272
},
{
"epoch": 0.07892454466608846,
"grad_norm": 1.267352186352676,
"learning_rate": 1.9762518561736782e-05,
"loss": 1.6591,
"step": 273
},
{
"epoch": 0.07921364556230125,
"grad_norm": 1.4466694988557338,
"learning_rate": 1.976052674698615e-05,
"loss": 1.7952,
"step": 274
},
{
"epoch": 0.07950274645851402,
"grad_norm": 1.321832538613218,
"learning_rate": 1.975852671538363e-05,
"loss": 1.7061,
"step": 275
},
{
"epoch": 0.0797918473547268,
"grad_norm": 1.2829050605849048,
"learning_rate": 1.9756518468612934e-05,
"loss": 1.6605,
"step": 276
},
{
"epoch": 0.08008094825093957,
"grad_norm": 1.2987764926867447,
"learning_rate": 1.9754502008364702e-05,
"loss": 1.7047,
"step": 277
},
{
"epoch": 0.08037004914715236,
"grad_norm": 1.2552473311885344,
"learning_rate": 1.9752477336336473e-05,
"loss": 1.7031,
"step": 278
},
{
"epoch": 0.08065915004336513,
"grad_norm": 1.1985068633897615,
"learning_rate": 1.9750444454232714e-05,
"loss": 1.7382,
"step": 279
},
{
"epoch": 0.08094825093957791,
"grad_norm": 1.314553485624065,
"learning_rate": 1.9748403363764795e-05,
"loss": 1.6491,
"step": 280
},
{
"epoch": 0.08123735183579069,
"grad_norm": 1.294998222736541,
"learning_rate": 1.9746354066650995e-05,
"loss": 1.7044,
"step": 281
},
{
"epoch": 0.08152645273200347,
"grad_norm": 1.3584436916798586,
"learning_rate": 1.974429656461651e-05,
"loss": 1.6672,
"step": 282
},
{
"epoch": 0.08181555362821624,
"grad_norm": 1.3102650044903212,
"learning_rate": 1.9742230859393432e-05,
"loss": 1.6954,
"step": 283
},
{
"epoch": 0.08210465452442903,
"grad_norm": 2.693221641429579,
"learning_rate": 1.974015695272077e-05,
"loss": 1.6296,
"step": 284
},
{
"epoch": 0.0823937554206418,
"grad_norm": 1.3351424022555927,
"learning_rate": 1.9738074846344427e-05,
"loss": 1.6594,
"step": 285
},
{
"epoch": 0.08268285631685458,
"grad_norm": 1.2766910430053877,
"learning_rate": 1.973598454201722e-05,
"loss": 1.6983,
"step": 286
},
{
"epoch": 0.08297195721306735,
"grad_norm": 1.4668406259400848,
"learning_rate": 1.973388604149886e-05,
"loss": 1.6698,
"step": 287
},
{
"epoch": 0.08326105810928014,
"grad_norm": 1.3104546778761357,
"learning_rate": 1.9731779346555957e-05,
"loss": 1.6723,
"step": 288
},
{
"epoch": 0.08355015900549291,
"grad_norm": 1.531005942756331,
"learning_rate": 1.9729664458962024e-05,
"loss": 1.7863,
"step": 289
},
{
"epoch": 0.0838392599017057,
"grad_norm": 1.3653225817314174,
"learning_rate": 1.9727541380497465e-05,
"loss": 1.7107,
"step": 290
},
{
"epoch": 0.08412836079791847,
"grad_norm": 1.3892591267410797,
"learning_rate": 1.972541011294959e-05,
"loss": 1.6403,
"step": 291
},
{
"epoch": 0.08441746169413125,
"grad_norm": 1.4118648874854935,
"learning_rate": 1.972327065811259e-05,
"loss": 1.6871,
"step": 292
},
{
"epoch": 0.08470656259034404,
"grad_norm": 1.4198103761024161,
"learning_rate": 1.972112301778756e-05,
"loss": 1.7095,
"step": 293
},
{
"epoch": 0.08499566348655681,
"grad_norm": 1.3896634687986935,
"learning_rate": 1.9718967193782475e-05,
"loss": 1.6347,
"step": 294
},
{
"epoch": 0.08528476438276959,
"grad_norm": 1.3431516951928908,
"learning_rate": 1.971680318791221e-05,
"loss": 1.7874,
"step": 295
},
{
"epoch": 0.08557386527898236,
"grad_norm": 1.3768126854675709,
"learning_rate": 1.9714631001998517e-05,
"loss": 1.6698,
"step": 296
},
{
"epoch": 0.08586296617519515,
"grad_norm": 1.452732246792645,
"learning_rate": 1.9712450637870048e-05,
"loss": 1.7896,
"step": 297
},
{
"epoch": 0.08615206707140792,
"grad_norm": 1.3268182615670008,
"learning_rate": 1.9710262097362323e-05,
"loss": 1.7162,
"step": 298
},
{
"epoch": 0.0864411679676207,
"grad_norm": 1.3451587005617647,
"learning_rate": 1.9708065382317763e-05,
"loss": 1.6132,
"step": 299
},
{
"epoch": 0.08673026886383348,
"grad_norm": 1.3567905514483718,
"learning_rate": 1.9705860494585653e-05,
"loss": 1.7181,
"step": 300
},
{
"epoch": 0.08701936976004626,
"grad_norm": 1.4178400258624095,
"learning_rate": 1.9703647436022174e-05,
"loss": 1.7197,
"step": 301
},
{
"epoch": 0.08730847065625903,
"grad_norm": 1.3176335169067535,
"learning_rate": 1.970142620849038e-05,
"loss": 1.6433,
"step": 302
},
{
"epoch": 0.08759757155247182,
"grad_norm": 1.3848318848405352,
"learning_rate": 1.9699196813860188e-05,
"loss": 1.7975,
"step": 303
},
{
"epoch": 0.08788667244868459,
"grad_norm": 1.4792413426843254,
"learning_rate": 1.9696959254008416e-05,
"loss": 1.7221,
"step": 304
},
{
"epoch": 0.08817577334489737,
"grad_norm": 1.3289471852066919,
"learning_rate": 1.9694713530818738e-05,
"loss": 1.7631,
"step": 305
},
{
"epoch": 0.08846487424111014,
"grad_norm": 1.4455763431259683,
"learning_rate": 1.9692459646181706e-05,
"loss": 1.758,
"step": 306
},
{
"epoch": 0.08875397513732293,
"grad_norm": 1.4156278432006244,
"learning_rate": 1.969019760199474e-05,
"loss": 1.7106,
"step": 307
},
{
"epoch": 0.0890430760335357,
"grad_norm": 1.335012985289393,
"learning_rate": 1.9687927400162134e-05,
"loss": 1.6303,
"step": 308
},
{
"epoch": 0.08933217692974849,
"grad_norm": 1.3515063736435737,
"learning_rate": 1.9685649042595045e-05,
"loss": 1.6907,
"step": 309
},
{
"epoch": 0.08962127782596126,
"grad_norm": 1.2906659764446389,
"learning_rate": 1.96833625312115e-05,
"loss": 1.7601,
"step": 310
},
{
"epoch": 0.08991037872217404,
"grad_norm": 1.4128720604920837,
"learning_rate": 1.968106786793638e-05,
"loss": 1.685,
"step": 311
},
{
"epoch": 0.09019947961838681,
"grad_norm": 1.3417671224975973,
"learning_rate": 1.967876505470144e-05,
"loss": 1.8098,
"step": 312
},
{
"epoch": 0.0904885805145996,
"grad_norm": 1.2524980838386586,
"learning_rate": 1.9676454093445293e-05,
"loss": 1.7347,
"step": 313
},
{
"epoch": 0.09077768141081237,
"grad_norm": 1.2903112596909276,
"learning_rate": 1.9674134986113407e-05,
"loss": 1.7183,
"step": 314
},
{
"epoch": 0.09106678230702515,
"grad_norm": 1.315346772447713,
"learning_rate": 1.9671807734658122e-05,
"loss": 1.666,
"step": 315
},
{
"epoch": 0.09135588320323793,
"grad_norm": 1.2815003343295048,
"learning_rate": 1.966947234103861e-05,
"loss": 1.6706,
"step": 316
},
{
"epoch": 0.09164498409945071,
"grad_norm": 1.3032057492518452,
"learning_rate": 1.9667128807220918e-05,
"loss": 1.6852,
"step": 317
},
{
"epoch": 0.09193408499566348,
"grad_norm": 1.399838923906023,
"learning_rate": 1.9664777135177936e-05,
"loss": 1.6786,
"step": 318
},
{
"epoch": 0.09222318589187627,
"grad_norm": 1.3183521495611081,
"learning_rate": 1.9662417326889408e-05,
"loss": 1.7656,
"step": 319
},
{
"epoch": 0.09251228678808904,
"grad_norm": 1.2919964291273878,
"learning_rate": 1.966004938434193e-05,
"loss": 1.7994,
"step": 320
},
{
"epoch": 0.09280138768430182,
"grad_norm": 1.331071824643648,
"learning_rate": 1.965767330952894e-05,
"loss": 1.7315,
"step": 321
},
{
"epoch": 0.0930904885805146,
"grad_norm": 1.3127780153887916,
"learning_rate": 1.9655289104450724e-05,
"loss": 1.58,
"step": 322
},
{
"epoch": 0.09337958947672738,
"grad_norm": 1.427592788785929,
"learning_rate": 1.9652896771114416e-05,
"loss": 1.6926,
"step": 323
},
{
"epoch": 0.09366869037294015,
"grad_norm": 1.3536283403103335,
"learning_rate": 1.9650496311533985e-05,
"loss": 1.6208,
"step": 324
},
{
"epoch": 0.09395779126915293,
"grad_norm": 1.3049249531971474,
"learning_rate": 1.9648087727730254e-05,
"loss": 1.7007,
"step": 325
},
{
"epoch": 0.0942468921653657,
"grad_norm": 8.074839924042823,
"learning_rate": 1.9645671021730876e-05,
"loss": 1.7401,
"step": 326
},
{
"epoch": 0.09453599306157849,
"grad_norm": 1.689958269781092,
"learning_rate": 1.9643246195570337e-05,
"loss": 1.7612,
"step": 327
},
{
"epoch": 0.09482509395779128,
"grad_norm": 1.3759192019092044,
"learning_rate": 1.9640813251289968e-05,
"loss": 1.6256,
"step": 328
},
{
"epoch": 0.09511419485400405,
"grad_norm": 1.2852662388113483,
"learning_rate": 1.9638372190937938e-05,
"loss": 1.5961,
"step": 329
},
{
"epoch": 0.09540329575021683,
"grad_norm": 1.3397296142416237,
"learning_rate": 1.963592301656923e-05,
"loss": 1.7228,
"step": 330
},
{
"epoch": 0.0956923966464296,
"grad_norm": 1.3050235554989664,
"learning_rate": 1.963346573024568e-05,
"loss": 1.631,
"step": 331
},
{
"epoch": 0.09598149754264239,
"grad_norm": 1.2687361679426568,
"learning_rate": 1.9631000334035936e-05,
"loss": 1.727,
"step": 332
},
{
"epoch": 0.09627059843885516,
"grad_norm": 1.4434478962876411,
"learning_rate": 1.962852683001548e-05,
"loss": 1.6118,
"step": 333
},
{
"epoch": 0.09655969933506794,
"grad_norm": 1.3835831592110455,
"learning_rate": 1.9626045220266626e-05,
"loss": 1.7184,
"step": 334
},
{
"epoch": 0.09684880023128072,
"grad_norm": 1.285345165652217,
"learning_rate": 1.96235555068785e-05,
"loss": 1.7396,
"step": 335
},
{
"epoch": 0.0971379011274935,
"grad_norm": 1.2741876410509978,
"learning_rate": 1.9621057691947052e-05,
"loss": 1.6827,
"step": 336
},
{
"epoch": 0.09742700202370627,
"grad_norm": 1.3414110861817643,
"learning_rate": 1.961855177757506e-05,
"loss": 1.7649,
"step": 337
},
{
"epoch": 0.09771610291991906,
"grad_norm": 1.3155148016487777,
"learning_rate": 1.9616037765872115e-05,
"loss": 1.6339,
"step": 338
},
{
"epoch": 0.09800520381613183,
"grad_norm": 1.3422815166211728,
"learning_rate": 1.9613515658954627e-05,
"loss": 1.6248,
"step": 339
},
{
"epoch": 0.09829430471234461,
"grad_norm": 1.2928851291446535,
"learning_rate": 1.9610985458945813e-05,
"loss": 1.6883,
"step": 340
},
{
"epoch": 0.09858340560855738,
"grad_norm": 1.310011271967797,
"learning_rate": 1.9608447167975716e-05,
"loss": 1.5808,
"step": 341
},
{
"epoch": 0.09887250650477017,
"grad_norm": 1.3065951865270455,
"learning_rate": 1.960590078818118e-05,
"loss": 1.6558,
"step": 342
},
{
"epoch": 0.09916160740098294,
"grad_norm": 1.3349828850162693,
"learning_rate": 1.960334632170587e-05,
"loss": 1.7289,
"step": 343
},
{
"epoch": 0.09945070829719573,
"grad_norm": 1.2943452424584643,
"learning_rate": 1.960078377070024e-05,
"loss": 1.613,
"step": 344
},
{
"epoch": 0.0997398091934085,
"grad_norm": 1.4861917307646695,
"learning_rate": 1.9598213137321568e-05,
"loss": 1.7105,
"step": 345
},
{
"epoch": 0.10002891008962128,
"grad_norm": 1.3722730686933504,
"learning_rate": 1.959563442373393e-05,
"loss": 1.7553,
"step": 346
},
{
"epoch": 0.10031801098583405,
"grad_norm": 1.3453966326217088,
"learning_rate": 1.95930476321082e-05,
"loss": 1.737,
"step": 347
},
{
"epoch": 0.10060711188204684,
"grad_norm": 1.3523784458893544,
"learning_rate": 1.9590452764622056e-05,
"loss": 1.6319,
"step": 348
},
{
"epoch": 0.10089621277825961,
"grad_norm": 1.3502487223737636,
"learning_rate": 1.9587849823459976e-05,
"loss": 1.6941,
"step": 349
},
{
"epoch": 0.1011853136744724,
"grad_norm": 1.398518128752155,
"learning_rate": 1.9585238810813233e-05,
"loss": 1.6475,
"step": 350
},
{
"epoch": 0.10147441457068516,
"grad_norm": 1.200679760577495,
"learning_rate": 1.9582619728879898e-05,
"loss": 1.7337,
"step": 351
},
{
"epoch": 0.10176351546689795,
"grad_norm": 1.3585311474803605,
"learning_rate": 1.9579992579864827e-05,
"loss": 1.7192,
"step": 352
},
{
"epoch": 0.10205261636311072,
"grad_norm": 1.4501829885257667,
"learning_rate": 1.9577357365979673e-05,
"loss": 1.7227,
"step": 353
},
{
"epoch": 0.1023417172593235,
"grad_norm": 1.2783896023412555,
"learning_rate": 1.9574714089442884e-05,
"loss": 1.7873,
"step": 354
},
{
"epoch": 0.10263081815553628,
"grad_norm": 1.2457865420249044,
"learning_rate": 1.9572062752479684e-05,
"loss": 1.6078,
"step": 355
},
{
"epoch": 0.10291991905174906,
"grad_norm": 1.298867064640292,
"learning_rate": 1.956940335732209e-05,
"loss": 1.7645,
"step": 356
},
{
"epoch": 0.10320901994796183,
"grad_norm": 1.2599727613590195,
"learning_rate": 1.95667359062089e-05,
"loss": 1.6698,
"step": 357
},
{
"epoch": 0.10349812084417462,
"grad_norm": 1.2508214829214046,
"learning_rate": 1.9564060401385697e-05,
"loss": 1.6279,
"step": 358
},
{
"epoch": 0.10378722174038739,
"grad_norm": 1.1832707796953907,
"learning_rate": 1.956137684510484e-05,
"loss": 1.5799,
"step": 359
},
{
"epoch": 0.10407632263660017,
"grad_norm": 1.3001383458399352,
"learning_rate": 1.9558685239625467e-05,
"loss": 1.6116,
"step": 360
},
{
"epoch": 0.10436542353281295,
"grad_norm": 1.2428470322542244,
"learning_rate": 1.9555985587213495e-05,
"loss": 1.6716,
"step": 361
},
{
"epoch": 0.10465452442902573,
"grad_norm": 1.3093773655524557,
"learning_rate": 1.955327789014161e-05,
"loss": 1.6915,
"step": 362
},
{
"epoch": 0.1049436253252385,
"grad_norm": 1.383021031949293,
"learning_rate": 1.9550562150689277e-05,
"loss": 1.7317,
"step": 363
},
{
"epoch": 0.10523272622145129,
"grad_norm": 1.329573981449402,
"learning_rate": 1.9547838371142726e-05,
"loss": 1.5913,
"step": 364
},
{
"epoch": 0.10552182711766407,
"grad_norm": 1.4166951368370655,
"learning_rate": 1.9545106553794962e-05,
"loss": 1.6004,
"step": 365
},
{
"epoch": 0.10581092801387684,
"grad_norm": 1.3377620749480776,
"learning_rate": 1.9542366700945746e-05,
"loss": 1.7907,
"step": 366
},
{
"epoch": 0.10610002891008963,
"grad_norm": 1.3388694333443414,
"learning_rate": 1.953961881490161e-05,
"loss": 1.6979,
"step": 367
},
{
"epoch": 0.1063891298063024,
"grad_norm": 1.3581005844418126,
"learning_rate": 1.9536862897975852e-05,
"loss": 1.6602,
"step": 368
},
{
"epoch": 0.10667823070251518,
"grad_norm": 1.3242684005167475,
"learning_rate": 1.9534098952488527e-05,
"loss": 1.7131,
"step": 369
},
{
"epoch": 0.10696733159872795,
"grad_norm": 1.2765833725967193,
"learning_rate": 1.9531326980766444e-05,
"loss": 1.7917,
"step": 370
},
{
"epoch": 0.10725643249494074,
"grad_norm": 1.2957317184999853,
"learning_rate": 1.952854698514318e-05,
"loss": 1.6569,
"step": 371
},
{
"epoch": 0.10754553339115351,
"grad_norm": 1.3050908294406351,
"learning_rate": 1.952575896795906e-05,
"loss": 1.5815,
"step": 372
},
{
"epoch": 0.1078346342873663,
"grad_norm": 1.4007267378768093,
"learning_rate": 1.952296293156116e-05,
"loss": 1.6437,
"step": 373
},
{
"epoch": 0.10812373518357907,
"grad_norm": 1.288112071905893,
"learning_rate": 1.952015887830331e-05,
"loss": 1.7148,
"step": 374
},
{
"epoch": 0.10841283607979185,
"grad_norm": 1.4132340648150645,
"learning_rate": 1.9517346810546094e-05,
"loss": 1.7236,
"step": 375
},
{
"epoch": 0.10870193697600462,
"grad_norm": 1.3520649123502502,
"learning_rate": 1.951452673065683e-05,
"loss": 1.7027,
"step": 376
},
{
"epoch": 0.10899103787221741,
"grad_norm": 1.344289195938231,
"learning_rate": 1.951169864100959e-05,
"loss": 1.6753,
"step": 377
},
{
"epoch": 0.10928013876843018,
"grad_norm": 1.2727655897790644,
"learning_rate": 1.950886254398519e-05,
"loss": 1.7595,
"step": 378
},
{
"epoch": 0.10956923966464296,
"grad_norm": 1.3488163579726409,
"learning_rate": 1.9506018441971186e-05,
"loss": 1.6687,
"step": 379
},
{
"epoch": 0.10985834056085574,
"grad_norm": 1.3474523993403258,
"learning_rate": 1.9503166337361863e-05,
"loss": 1.6676,
"step": 380
},
{
"epoch": 0.11014744145706852,
"grad_norm": 1.2758765009101956,
"learning_rate": 1.9500306232558263e-05,
"loss": 1.6571,
"step": 381
},
{
"epoch": 0.11043654235328129,
"grad_norm": 1.2196542542467945,
"learning_rate": 1.9497438129968144e-05,
"loss": 1.6551,
"step": 382
},
{
"epoch": 0.11072564324949408,
"grad_norm": 1.301338171513902,
"learning_rate": 1.9494562032006e-05,
"loss": 1.6548,
"step": 383
},
{
"epoch": 0.11101474414570685,
"grad_norm": 1.3203283874481997,
"learning_rate": 1.9491677941093075e-05,
"loss": 1.7338,
"step": 384
},
{
"epoch": 0.11130384504191963,
"grad_norm": 1.3024684784620577,
"learning_rate": 1.9488785859657314e-05,
"loss": 1.5528,
"step": 385
},
{
"epoch": 0.1115929459381324,
"grad_norm": 1.2625026147962017,
"learning_rate": 1.9485885790133402e-05,
"loss": 1.6967,
"step": 386
},
{
"epoch": 0.11188204683434519,
"grad_norm": 1.49068356270609,
"learning_rate": 1.9482977734962753e-05,
"loss": 1.7096,
"step": 387
},
{
"epoch": 0.11217114773055796,
"grad_norm": 1.3130185709588313,
"learning_rate": 1.9480061696593502e-05,
"loss": 1.7174,
"step": 388
},
{
"epoch": 0.11246024862677075,
"grad_norm": 1.2742915647508892,
"learning_rate": 1.9477137677480493e-05,
"loss": 1.6696,
"step": 389
},
{
"epoch": 0.11274934952298352,
"grad_norm": 1.3627324489936457,
"learning_rate": 1.9474205680085302e-05,
"loss": 1.6542,
"step": 390
},
{
"epoch": 0.1130384504191963,
"grad_norm": 1.3437349437643114,
"learning_rate": 1.947126570687622e-05,
"loss": 1.6747,
"step": 391
},
{
"epoch": 0.11332755131540907,
"grad_norm": 1.2678997121287945,
"learning_rate": 1.9468317760328244e-05,
"loss": 1.6918,
"step": 392
},
{
"epoch": 0.11361665221162186,
"grad_norm": 1.2929245484043537,
"learning_rate": 1.9465361842923087e-05,
"loss": 1.7217,
"step": 393
},
{
"epoch": 0.11390575310783463,
"grad_norm": 1.2499932038294317,
"learning_rate": 1.9462397957149182e-05,
"loss": 1.6128,
"step": 394
},
{
"epoch": 0.11419485400404741,
"grad_norm": 1.221557019735438,
"learning_rate": 1.9459426105501653e-05,
"loss": 1.6467,
"step": 395
},
{
"epoch": 0.11448395490026018,
"grad_norm": 1.2693539163839065,
"learning_rate": 1.945644629048234e-05,
"loss": 1.7283,
"step": 396
},
{
"epoch": 0.11477305579647297,
"grad_norm": 1.21274399309353,
"learning_rate": 1.9453458514599794e-05,
"loss": 1.7008,
"step": 397
},
{
"epoch": 0.11506215669268574,
"grad_norm": 1.2218147838259905,
"learning_rate": 1.945046278036925e-05,
"loss": 1.7437,
"step": 398
},
{
"epoch": 0.11535125758889853,
"grad_norm": 1.2968685896469103,
"learning_rate": 1.9447459090312656e-05,
"loss": 1.6983,
"step": 399
},
{
"epoch": 0.1156403584851113,
"grad_norm": 1.2334993392446711,
"learning_rate": 1.9444447446958658e-05,
"loss": 1.7061,
"step": 400
},
{
"epoch": 0.11592945938132408,
"grad_norm": 1.2984129611641557,
"learning_rate": 1.9441427852842584e-05,
"loss": 1.663,
"step": 401
},
{
"epoch": 0.11621856027753687,
"grad_norm": 1.2475965365995239,
"learning_rate": 1.943840031050647e-05,
"loss": 1.6427,
"step": 402
},
{
"epoch": 0.11650766117374964,
"grad_norm": 1.2694332854835886,
"learning_rate": 1.943536482249904e-05,
"loss": 1.7047,
"step": 403
},
{
"epoch": 0.11679676206996242,
"grad_norm": 1.2646968431962804,
"learning_rate": 1.94323213913757e-05,
"loss": 1.6885,
"step": 404
},
{
"epoch": 0.1170858629661752,
"grad_norm": 1.2293159012021218,
"learning_rate": 1.9429270019698553e-05,
"loss": 1.757,
"step": 405
},
{
"epoch": 0.11737496386238798,
"grad_norm": 1.2484085036943657,
"learning_rate": 1.9426210710036374e-05,
"loss": 1.6741,
"step": 406
},
{
"epoch": 0.11766406475860075,
"grad_norm": 1.43839701132394,
"learning_rate": 1.9423143464964633e-05,
"loss": 1.6768,
"step": 407
},
{
"epoch": 0.11795316565481354,
"grad_norm": 1.292908155403034,
"learning_rate": 1.9420068287065476e-05,
"loss": 1.6126,
"step": 408
},
{
"epoch": 0.1182422665510263,
"grad_norm": 1.370089960450461,
"learning_rate": 1.9416985178927724e-05,
"loss": 1.731,
"step": 409
},
{
"epoch": 0.11853136744723909,
"grad_norm": 1.3262176752311388,
"learning_rate": 1.941389414314688e-05,
"loss": 1.6579,
"step": 410
},
{
"epoch": 0.11882046834345186,
"grad_norm": 1.339305591546624,
"learning_rate": 1.9410795182325113e-05,
"loss": 1.7495,
"step": 411
},
{
"epoch": 0.11910956923966465,
"grad_norm": 1.341821261680916,
"learning_rate": 1.940768829907127e-05,
"loss": 1.7005,
"step": 412
},
{
"epoch": 0.11939867013587742,
"grad_norm": 1.276085151807024,
"learning_rate": 1.9404573496000867e-05,
"loss": 1.6043,
"step": 413
},
{
"epoch": 0.1196877710320902,
"grad_norm": 1.302570146214278,
"learning_rate": 1.940145077573608e-05,
"loss": 1.6722,
"step": 414
},
{
"epoch": 0.11997687192830297,
"grad_norm": 1.2582811301580585,
"learning_rate": 1.9398320140905765e-05,
"loss": 1.6383,
"step": 415
},
{
"epoch": 0.12026597282451576,
"grad_norm": 1.258733310784144,
"learning_rate": 1.9395181594145428e-05,
"loss": 1.5806,
"step": 416
},
{
"epoch": 0.12055507372072853,
"grad_norm": 1.2988890119500425,
"learning_rate": 1.9392035138097235e-05,
"loss": 1.7048,
"step": 417
},
{
"epoch": 0.12084417461694132,
"grad_norm": 1.29122939937491,
"learning_rate": 1.938888077541002e-05,
"loss": 1.5844,
"step": 418
},
{
"epoch": 0.12113327551315409,
"grad_norm": 1.2756402426612268,
"learning_rate": 1.9385718508739263e-05,
"loss": 1.6935,
"step": 419
},
{
"epoch": 0.12142237640936687,
"grad_norm": 1.3542547094512076,
"learning_rate": 1.9382548340747105e-05,
"loss": 1.6781,
"step": 420
},
{
"epoch": 0.12171147730557964,
"grad_norm": 1.3120014515081435,
"learning_rate": 1.937937027410234e-05,
"loss": 1.6601,
"step": 421
},
{
"epoch": 0.12200057820179243,
"grad_norm": 1.2955651895370563,
"learning_rate": 1.9376184311480404e-05,
"loss": 1.59,
"step": 422
},
{
"epoch": 0.1222896790980052,
"grad_norm": 1.3192882851086605,
"learning_rate": 1.9372990455563384e-05,
"loss": 1.6291,
"step": 423
},
{
"epoch": 0.12257877999421798,
"grad_norm": 1.4080370752052034,
"learning_rate": 1.9369788709040014e-05,
"loss": 1.5843,
"step": 424
},
{
"epoch": 0.12286788089043076,
"grad_norm": 1.309890861391924,
"learning_rate": 1.936657907460567e-05,
"loss": 1.7576,
"step": 425
},
{
"epoch": 0.12315698178664354,
"grad_norm": 1.3296661949737203,
"learning_rate": 1.936336155496236e-05,
"loss": 1.6412,
"step": 426
},
{
"epoch": 0.12344608268285631,
"grad_norm": 1.2137542022109604,
"learning_rate": 1.9360136152818746e-05,
"loss": 1.6677,
"step": 427
},
{
"epoch": 0.1237351835790691,
"grad_norm": 1.304478996513997,
"learning_rate": 1.9356902870890114e-05,
"loss": 1.6157,
"step": 428
},
{
"epoch": 0.12402428447528187,
"grad_norm": 1.2362598408529617,
"learning_rate": 1.9353661711898382e-05,
"loss": 1.6493,
"step": 429
},
{
"epoch": 0.12431338537149465,
"grad_norm": 1.3868990501411094,
"learning_rate": 1.9350412678572114e-05,
"loss": 1.7838,
"step": 430
},
{
"epoch": 0.12460248626770742,
"grad_norm": 1.3811144656508765,
"learning_rate": 1.934715577364648e-05,
"loss": 1.6164,
"step": 431
},
{
"epoch": 0.12489158716392021,
"grad_norm": 1.1886802269263785,
"learning_rate": 1.93438909998633e-05,
"loss": 1.6917,
"step": 432
},
{
"epoch": 0.125180688060133,
"grad_norm": 1.373861158324828,
"learning_rate": 1.9340618359971003e-05,
"loss": 1.7273,
"step": 433
},
{
"epoch": 0.12546978895634575,
"grad_norm": 1.360887411910304,
"learning_rate": 1.9337337856724647e-05,
"loss": 1.6129,
"step": 434
},
{
"epoch": 0.12575888985255854,
"grad_norm": 1.2310796622388105,
"learning_rate": 1.9334049492885906e-05,
"loss": 1.686,
"step": 435
},
{
"epoch": 0.12604799074877132,
"grad_norm": 1.2150119003782842,
"learning_rate": 1.9330753271223073e-05,
"loss": 1.7097,
"step": 436
},
{
"epoch": 0.1263370916449841,
"grad_norm": 1.3137735986745411,
"learning_rate": 1.932744919451106e-05,
"loss": 1.7312,
"step": 437
},
{
"epoch": 0.1266261925411969,
"grad_norm": 1.19581010090534,
"learning_rate": 1.9324137265531383e-05,
"loss": 1.7073,
"step": 438
},
{
"epoch": 0.12691529343740965,
"grad_norm": 1.1864958252257658,
"learning_rate": 1.9320817487072174e-05,
"loss": 1.6634,
"step": 439
},
{
"epoch": 0.12720439433362243,
"grad_norm": 1.2519130079679632,
"learning_rate": 1.9317489861928173e-05,
"loss": 1.6686,
"step": 440
},
{
"epoch": 0.12749349522983522,
"grad_norm": 1.2098081719210536,
"learning_rate": 1.9314154392900728e-05,
"loss": 1.697,
"step": 441
},
{
"epoch": 0.127782596126048,
"grad_norm": 1.297112805486722,
"learning_rate": 1.9310811082797784e-05,
"loss": 1.5979,
"step": 442
},
{
"epoch": 0.12807169702226076,
"grad_norm": 1.3006902970607843,
"learning_rate": 1.930745993443389e-05,
"loss": 1.5987,
"step": 443
},
{
"epoch": 0.12836079791847355,
"grad_norm": 1.3888684660127295,
"learning_rate": 1.9304100950630197e-05,
"loss": 1.6047,
"step": 444
},
{
"epoch": 0.12864989881468633,
"grad_norm": 1.3325945840074427,
"learning_rate": 1.9300734134214443e-05,
"loss": 1.6813,
"step": 445
},
{
"epoch": 0.12893899971089912,
"grad_norm": 1.3053169239838074,
"learning_rate": 1.9297359488020974e-05,
"loss": 1.8416,
"step": 446
},
{
"epoch": 0.12922810060711187,
"grad_norm": 1.1934876374596244,
"learning_rate": 1.9293977014890716e-05,
"loss": 1.7005,
"step": 447
},
{
"epoch": 0.12951720150332466,
"grad_norm": 1.2640391776060138,
"learning_rate": 1.929058671767119e-05,
"loss": 1.6578,
"step": 448
},
{
"epoch": 0.12980630239953744,
"grad_norm": 1.2646711203792806,
"learning_rate": 1.9287188599216492e-05,
"loss": 1.6699,
"step": 449
},
{
"epoch": 0.13009540329575023,
"grad_norm": 1.2320011875752572,
"learning_rate": 1.928378266238732e-05,
"loss": 1.6525,
"step": 450
},
{
"epoch": 0.13038450419196299,
"grad_norm": 1.34573408732214,
"learning_rate": 1.9280368910050943e-05,
"loss": 1.7015,
"step": 451
},
{
"epoch": 0.13067360508817577,
"grad_norm": 1.2353671099343633,
"learning_rate": 1.9276947345081213e-05,
"loss": 1.6397,
"step": 452
},
{
"epoch": 0.13096270598438856,
"grad_norm": 1.2197831661100311,
"learning_rate": 1.9273517970358557e-05,
"loss": 1.6836,
"step": 453
},
{
"epoch": 0.13125180688060134,
"grad_norm": 1.328367741401711,
"learning_rate": 1.9270080788769978e-05,
"loss": 1.7178,
"step": 454
},
{
"epoch": 0.1315409077768141,
"grad_norm": 1.2668567068996344,
"learning_rate": 1.9266635803209047e-05,
"loss": 1.7255,
"step": 455
},
{
"epoch": 0.13183000867302688,
"grad_norm": 1.2322035718739746,
"learning_rate": 1.9263183016575912e-05,
"loss": 1.612,
"step": 456
},
{
"epoch": 0.13211910956923967,
"grad_norm": 1.244606536596135,
"learning_rate": 1.9259722431777286e-05,
"loss": 1.7994,
"step": 457
},
{
"epoch": 0.13240821046545245,
"grad_norm": 1.2384126698619529,
"learning_rate": 1.9256254051726443e-05,
"loss": 1.6401,
"step": 458
},
{
"epoch": 0.1326973113616652,
"grad_norm": 1.2780953635907069,
"learning_rate": 1.925277787934322e-05,
"loss": 1.5826,
"step": 459
},
{
"epoch": 0.132986412257878,
"grad_norm": 1.2798396535904,
"learning_rate": 1.9249293917554017e-05,
"loss": 1.674,
"step": 460
},
{
"epoch": 0.13327551315409078,
"grad_norm": 1.2799203698146353,
"learning_rate": 1.924580216929179e-05,
"loss": 1.6331,
"step": 461
},
{
"epoch": 0.13356461405030357,
"grad_norm": 1.2111633406900368,
"learning_rate": 1.9242302637496057e-05,
"loss": 1.6682,
"step": 462
},
{
"epoch": 0.13385371494651632,
"grad_norm": 1.2112859487688281,
"learning_rate": 1.9238795325112867e-05,
"loss": 1.6484,
"step": 463
},
{
"epoch": 0.1341428158427291,
"grad_norm": 1.288135140665184,
"learning_rate": 1.9235280235094852e-05,
"loss": 1.626,
"step": 464
},
{
"epoch": 0.1344319167389419,
"grad_norm": 1.308513915152716,
"learning_rate": 1.923175737040116e-05,
"loss": 1.6597,
"step": 465
},
{
"epoch": 0.13472101763515468,
"grad_norm": 1.3262569041157928,
"learning_rate": 1.9228226733997503e-05,
"loss": 1.7642,
"step": 466
},
{
"epoch": 0.13501011853136743,
"grad_norm": 1.3424290380477188,
"learning_rate": 1.9224688328856127e-05,
"loss": 1.7465,
"step": 467
},
{
"epoch": 0.13529921942758022,
"grad_norm": 1.2651279914803906,
"learning_rate": 1.9221142157955825e-05,
"loss": 1.6126,
"step": 468
},
{
"epoch": 0.135588320323793,
"grad_norm": 1.3558655043001309,
"learning_rate": 1.921758822428192e-05,
"loss": 1.7556,
"step": 469
},
{
"epoch": 0.1358774212200058,
"grad_norm": 1.402001055732072,
"learning_rate": 1.921402653082628e-05,
"loss": 1.7992,
"step": 470
},
{
"epoch": 0.13616652211621855,
"grad_norm": 1.3182689558852307,
"learning_rate": 1.9210457080587285e-05,
"loss": 1.6262,
"step": 471
},
{
"epoch": 0.13645562301243133,
"grad_norm": 1.2723760551262082,
"learning_rate": 1.9206879876569877e-05,
"loss": 1.7064,
"step": 472
},
{
"epoch": 0.13674472390864412,
"grad_norm": 1.2954400935028445,
"learning_rate": 1.92032949217855e-05,
"loss": 1.6374,
"step": 473
},
{
"epoch": 0.1370338248048569,
"grad_norm": 1.2402948267042866,
"learning_rate": 1.9199702219252128e-05,
"loss": 1.5951,
"step": 474
},
{
"epoch": 0.1373229257010697,
"grad_norm": 1.2739630063945762,
"learning_rate": 1.919610177199426e-05,
"loss": 1.7167,
"step": 475
},
{
"epoch": 0.13761202659728244,
"grad_norm": 1.4113213475938031,
"learning_rate": 1.9192493583042922e-05,
"loss": 1.5751,
"step": 476
},
{
"epoch": 0.13790112749349523,
"grad_norm": 1.2872821986905296,
"learning_rate": 1.9188877655435645e-05,
"loss": 1.65,
"step": 477
},
{
"epoch": 0.13819022838970801,
"grad_norm": 1.2480655610937714,
"learning_rate": 1.918525399221648e-05,
"loss": 1.6882,
"step": 478
},
{
"epoch": 0.1384793292859208,
"grad_norm": 1.2681694886992483,
"learning_rate": 1.9181622596435993e-05,
"loss": 1.7528,
"step": 479
},
{
"epoch": 0.13876843018213356,
"grad_norm": 1.226404564427387,
"learning_rate": 1.917798347115125e-05,
"loss": 1.5918,
"step": 480
},
{
"epoch": 0.13905753107834634,
"grad_norm": 1.2687390154564733,
"learning_rate": 1.9174336619425842e-05,
"loss": 1.7533,
"step": 481
},
{
"epoch": 0.13934663197455913,
"grad_norm": 1.2254053559809934,
"learning_rate": 1.9170682044329845e-05,
"loss": 1.5968,
"step": 482
},
{
"epoch": 0.1396357328707719,
"grad_norm": 1.2732869875336201,
"learning_rate": 1.9167019748939847e-05,
"loss": 1.6944,
"step": 483
},
{
"epoch": 0.13992483376698467,
"grad_norm": 1.2541243190490785,
"learning_rate": 1.9163349736338935e-05,
"loss": 1.6237,
"step": 484
},
{
"epoch": 0.14021393466319745,
"grad_norm": 1.3291823171118335,
"learning_rate": 1.915967200961669e-05,
"loss": 1.5639,
"step": 485
},
{
"epoch": 0.14050303555941024,
"grad_norm": 1.2528167731427482,
"learning_rate": 1.9155986571869184e-05,
"loss": 1.7458,
"step": 486
},
{
"epoch": 0.14079213645562302,
"grad_norm": 1.2107095174484985,
"learning_rate": 1.9152293426198997e-05,
"loss": 1.6838,
"step": 487
},
{
"epoch": 0.14108123735183578,
"grad_norm": 1.2070210238330805,
"learning_rate": 1.9148592575715175e-05,
"loss": 1.6957,
"step": 488
},
{
"epoch": 0.14137033824804857,
"grad_norm": 1.3501466427051918,
"learning_rate": 1.9144884023533262e-05,
"loss": 1.6848,
"step": 489
},
{
"epoch": 0.14165943914426135,
"grad_norm": 1.289837942023481,
"learning_rate": 1.914116777277529e-05,
"loss": 1.6398,
"step": 490
},
{
"epoch": 0.14194854004047414,
"grad_norm": 1.2072886210991958,
"learning_rate": 1.9137443826569758e-05,
"loss": 1.6803,
"step": 491
},
{
"epoch": 0.1422376409366869,
"grad_norm": 1.2261256594899907,
"learning_rate": 1.9133712188051664e-05,
"loss": 1.8143,
"step": 492
},
{
"epoch": 0.14252674183289968,
"grad_norm": 1.191555412915695,
"learning_rate": 1.912997286036246e-05,
"loss": 1.7273,
"step": 493
},
{
"epoch": 0.14281584272911246,
"grad_norm": 1.2668769555610104,
"learning_rate": 1.9126225846650083e-05,
"loss": 1.6729,
"step": 494
},
{
"epoch": 0.14310494362532525,
"grad_norm": 1.2310680992910144,
"learning_rate": 1.912247115006894e-05,
"loss": 1.5509,
"step": 495
},
{
"epoch": 0.143394044521538,
"grad_norm": 1.2540456207457256,
"learning_rate": 1.91187087737799e-05,
"loss": 1.6512,
"step": 496
},
{
"epoch": 0.1436831454177508,
"grad_norm": 1.2983726556931725,
"learning_rate": 1.9114938720950307e-05,
"loss": 1.6434,
"step": 497
},
{
"epoch": 0.14397224631396358,
"grad_norm": 1.2424942795398506,
"learning_rate": 1.9111160994753957e-05,
"loss": 1.712,
"step": 498
},
{
"epoch": 0.14426134721017636,
"grad_norm": 1.2549250776078968,
"learning_rate": 1.9107375598371112e-05,
"loss": 1.5818,
"step": 499
},
{
"epoch": 0.14455044810638912,
"grad_norm": 1.171699859687026,
"learning_rate": 1.910358253498849e-05,
"loss": 1.6075,
"step": 500
},
{
"epoch": 0.1448395490026019,
"grad_norm": 1.2660287905231888,
"learning_rate": 1.9099781807799263e-05,
"loss": 1.6403,
"step": 501
},
{
"epoch": 0.1451286498988147,
"grad_norm": 1.2138215200444276,
"learning_rate": 1.9095973420003056e-05,
"loss": 1.6214,
"step": 502
},
{
"epoch": 0.14541775079502747,
"grad_norm": 1.2379532082411215,
"learning_rate": 1.909215737480594e-05,
"loss": 1.5915,
"step": 503
},
{
"epoch": 0.14570685169124023,
"grad_norm": 1.2691642883216934,
"learning_rate": 1.9088333675420432e-05,
"loss": 1.6913,
"step": 504
},
{
"epoch": 0.14599595258745302,
"grad_norm": 1.3365845490222084,
"learning_rate": 1.9084502325065494e-05,
"loss": 1.7174,
"step": 505
},
{
"epoch": 0.1462850534836658,
"grad_norm": 1.2933988368966916,
"learning_rate": 1.9080663326966536e-05,
"loss": 1.6947,
"step": 506
},
{
"epoch": 0.14657415437987859,
"grad_norm": 1.1997391314057113,
"learning_rate": 1.9076816684355398e-05,
"loss": 1.6454,
"step": 507
},
{
"epoch": 0.14686325527609134,
"grad_norm": 1.2253205782269874,
"learning_rate": 1.9072962400470348e-05,
"loss": 1.6475,
"step": 508
},
{
"epoch": 0.14715235617230413,
"grad_norm": 1.149822466017807,
"learning_rate": 1.9069100478556112e-05,
"loss": 1.5374,
"step": 509
},
{
"epoch": 0.1474414570685169,
"grad_norm": 1.2247546090753016,
"learning_rate": 1.906523092186382e-05,
"loss": 1.7436,
"step": 510
},
{
"epoch": 0.1477305579647297,
"grad_norm": 1.1999484360154251,
"learning_rate": 1.9061353733651036e-05,
"loss": 1.6747,
"step": 511
},
{
"epoch": 0.14801965886094248,
"grad_norm": 1.1947780812062576,
"learning_rate": 1.905746891718176e-05,
"loss": 1.59,
"step": 512
},
{
"epoch": 0.14830875975715524,
"grad_norm": 1.2483481364125735,
"learning_rate": 1.9053576475726406e-05,
"loss": 1.6343,
"step": 513
},
{
"epoch": 0.14859786065336802,
"grad_norm": 1.2303596001336339,
"learning_rate": 1.9049676412561803e-05,
"loss": 1.701,
"step": 514
},
{
"epoch": 0.1488869615495808,
"grad_norm": 1.2116845736311206,
"learning_rate": 1.9045768730971198e-05,
"loss": 1.7507,
"step": 515
},
{
"epoch": 0.1491760624457936,
"grad_norm": 1.299089146099033,
"learning_rate": 1.904185343424426e-05,
"loss": 1.7314,
"step": 516
},
{
"epoch": 0.14946516334200635,
"grad_norm": 1.2754215966625346,
"learning_rate": 1.9037930525677058e-05,
"loss": 1.6955,
"step": 517
},
{
"epoch": 0.14975426423821914,
"grad_norm": 1.2864098329743727,
"learning_rate": 1.9034000008572073e-05,
"loss": 1.6926,
"step": 518
},
{
"epoch": 0.15004336513443192,
"grad_norm": 1.2920675731077655,
"learning_rate": 1.9030061886238195e-05,
"loss": 1.6528,
"step": 519
},
{
"epoch": 0.1503324660306447,
"grad_norm": 1.2265090425552296,
"learning_rate": 1.9026116161990712e-05,
"loss": 1.647,
"step": 520
},
{
"epoch": 0.15062156692685746,
"grad_norm": 1.305532335335852,
"learning_rate": 1.902216283915131e-05,
"loss": 1.6085,
"step": 521
},
{
"epoch": 0.15091066782307025,
"grad_norm": 1.2996647218299158,
"learning_rate": 1.901820192104808e-05,
"loss": 1.6858,
"step": 522
},
{
"epoch": 0.15119976871928303,
"grad_norm": 1.2728823004232528,
"learning_rate": 1.9014233411015496e-05,
"loss": 1.6152,
"step": 523
},
{
"epoch": 0.15148886961549582,
"grad_norm": 1.237846119035289,
"learning_rate": 1.901025731239443e-05,
"loss": 1.7852,
"step": 524
},
{
"epoch": 0.15177797051170858,
"grad_norm": 1.263777772165666,
"learning_rate": 1.9006273628532146e-05,
"loss": 1.6714,
"step": 525
},
{
"epoch": 0.15206707140792136,
"grad_norm": 1.1966029829151998,
"learning_rate": 1.9002282362782284e-05,
"loss": 1.5772,
"step": 526
},
{
"epoch": 0.15235617230413415,
"grad_norm": 1.2573815442286682,
"learning_rate": 1.8998283518504874e-05,
"loss": 1.6586,
"step": 527
},
{
"epoch": 0.15264527320034693,
"grad_norm": 1.3315550643097303,
"learning_rate": 1.899427709906632e-05,
"loss": 1.5776,
"step": 528
},
{
"epoch": 0.1529343740965597,
"grad_norm": 1.27455167369078,
"learning_rate": 1.899026310783941e-05,
"loss": 1.6383,
"step": 529
},
{
"epoch": 0.15322347499277247,
"grad_norm": 1.2177716057481967,
"learning_rate": 1.8986241548203305e-05,
"loss": 1.7259,
"step": 530
},
{
"epoch": 0.15351257588898526,
"grad_norm": 1.4172388350158833,
"learning_rate": 1.898221242354353e-05,
"loss": 1.6084,
"step": 531
},
{
"epoch": 0.15380167678519804,
"grad_norm": 1.2977973632114062,
"learning_rate": 1.8978175737251984e-05,
"loss": 1.7148,
"step": 532
},
{
"epoch": 0.1540907776814108,
"grad_norm": 1.3288320048402222,
"learning_rate": 1.897413149272694e-05,
"loss": 1.7092,
"step": 533
},
{
"epoch": 0.15437987857762359,
"grad_norm": 1.362022094342137,
"learning_rate": 1.8970079693373014e-05,
"loss": 1.6727,
"step": 534
},
{
"epoch": 0.15466897947383637,
"grad_norm": 1.2868416836773502,
"learning_rate": 1.8966020342601204e-05,
"loss": 1.6544,
"step": 535
},
{
"epoch": 0.15495808037004916,
"grad_norm": 1.2255959793529978,
"learning_rate": 1.8961953443828853e-05,
"loss": 1.595,
"step": 536
},
{
"epoch": 0.1552471812662619,
"grad_norm": 1.2265187293683357,
"learning_rate": 1.8957879000479647e-05,
"loss": 1.6067,
"step": 537
},
{
"epoch": 0.1555362821624747,
"grad_norm": 1.198940426728034,
"learning_rate": 1.8953797015983654e-05,
"loss": 1.6146,
"step": 538
},
{
"epoch": 0.15582538305868748,
"grad_norm": 1.190556858810557,
"learning_rate": 1.8949707493777262e-05,
"loss": 1.699,
"step": 539
},
{
"epoch": 0.15611448395490027,
"grad_norm": 1.1336765732790504,
"learning_rate": 1.894561043730322e-05,
"loss": 1.5966,
"step": 540
},
{
"epoch": 0.15640358485111303,
"grad_norm": 1.1667137603744793,
"learning_rate": 1.8941505850010612e-05,
"loss": 1.6644,
"step": 541
},
{
"epoch": 0.1566926857473258,
"grad_norm": 1.3524401243741684,
"learning_rate": 1.8937393735354865e-05,
"loss": 1.6905,
"step": 542
},
{
"epoch": 0.1569817866435386,
"grad_norm": 1.2665686572396084,
"learning_rate": 1.8933274096797745e-05,
"loss": 1.5214,
"step": 543
},
{
"epoch": 0.15727088753975138,
"grad_norm": 1.2333567103961114,
"learning_rate": 1.892914693780735e-05,
"loss": 1.6044,
"step": 544
},
{
"epoch": 0.15755998843596414,
"grad_norm": 1.2700672132910338,
"learning_rate": 1.8925012261858103e-05,
"loss": 1.6295,
"step": 545
},
{
"epoch": 0.15784908933217692,
"grad_norm": 1.3269513720335189,
"learning_rate": 1.892087007243077e-05,
"loss": 1.5688,
"step": 546
},
{
"epoch": 0.1581381902283897,
"grad_norm": 1.444704489940057,
"learning_rate": 1.8916720373012425e-05,
"loss": 1.6048,
"step": 547
},
{
"epoch": 0.1584272911246025,
"grad_norm": 1.2362771720222352,
"learning_rate": 1.8912563167096478e-05,
"loss": 1.6548,
"step": 548
},
{
"epoch": 0.15871639202081528,
"grad_norm": 1.3841466139534755,
"learning_rate": 1.8908398458182647e-05,
"loss": 1.5571,
"step": 549
},
{
"epoch": 0.15900549291702804,
"grad_norm": 1.3571467399932051,
"learning_rate": 1.8904226249776977e-05,
"loss": 1.741,
"step": 550
},
{
"epoch": 0.15929459381324082,
"grad_norm": 1.3013296148852647,
"learning_rate": 1.8900046545391817e-05,
"loss": 1.6925,
"step": 551
},
{
"epoch": 0.1595836947094536,
"grad_norm": 1.4245052366468887,
"learning_rate": 1.8895859348545832e-05,
"loss": 1.6373,
"step": 552
},
{
"epoch": 0.1598727956056664,
"grad_norm": 1.4862334656509957,
"learning_rate": 1.8891664662763998e-05,
"loss": 1.7684,
"step": 553
},
{
"epoch": 0.16016189650187915,
"grad_norm": 1.4591040164361495,
"learning_rate": 1.8887462491577585e-05,
"loss": 1.6048,
"step": 554
},
{
"epoch": 0.16045099739809193,
"grad_norm": 1.3905790801269649,
"learning_rate": 1.8883252838524167e-05,
"loss": 1.7057,
"step": 555
},
{
"epoch": 0.16074009829430472,
"grad_norm": 1.296590703552253,
"learning_rate": 1.8879035707147634e-05,
"loss": 1.5708,
"step": 556
},
{
"epoch": 0.1610291991905175,
"grad_norm": 1.4224940976099907,
"learning_rate": 1.887481110099814e-05,
"loss": 1.6613,
"step": 557
},
{
"epoch": 0.16131830008673026,
"grad_norm": 1.209416335942298,
"learning_rate": 1.8870579023632163e-05,
"loss": 1.6734,
"step": 558
},
{
"epoch": 0.16160740098294304,
"grad_norm": 1.3856482467534985,
"learning_rate": 1.886633947861245e-05,
"loss": 1.6075,
"step": 559
},
{
"epoch": 0.16189650187915583,
"grad_norm": 1.298197860303536,
"learning_rate": 1.8862092469508043e-05,
"loss": 1.7309,
"step": 560
},
{
"epoch": 0.16218560277536861,
"grad_norm": 1.2884008702670706,
"learning_rate": 1.8857837999894265e-05,
"loss": 1.7367,
"step": 561
},
{
"epoch": 0.16247470367158137,
"grad_norm": 1.3295149830237742,
"learning_rate": 1.8853576073352724e-05,
"loss": 1.6373,
"step": 562
},
{
"epoch": 0.16276380456779416,
"grad_norm": 1.244943878603924,
"learning_rate": 1.8849306693471297e-05,
"loss": 1.7586,
"step": 563
},
{
"epoch": 0.16305290546400694,
"grad_norm": 1.4571917293027963,
"learning_rate": 1.884502986384414e-05,
"loss": 1.6308,
"step": 564
},
{
"epoch": 0.16334200636021973,
"grad_norm": 1.2273508652400458,
"learning_rate": 1.884074558807169e-05,
"loss": 1.7033,
"step": 565
},
{
"epoch": 0.16363110725643248,
"grad_norm": 1.3596367181968156,
"learning_rate": 1.8836453869760633e-05,
"loss": 1.7688,
"step": 566
},
{
"epoch": 0.16392020815264527,
"grad_norm": 1.1782222979069559,
"learning_rate": 1.883215471252394e-05,
"loss": 1.6864,
"step": 567
},
{
"epoch": 0.16420930904885805,
"grad_norm": 1.1876724035966852,
"learning_rate": 1.882784811998083e-05,
"loss": 1.651,
"step": 568
},
{
"epoch": 0.16449840994507084,
"grad_norm": 1.2807588292559342,
"learning_rate": 1.8823534095756787e-05,
"loss": 1.7127,
"step": 569
},
{
"epoch": 0.1647875108412836,
"grad_norm": 1.1959776867333827,
"learning_rate": 1.881921264348355e-05,
"loss": 1.6117,
"step": 570
},
{
"epoch": 0.16507661173749638,
"grad_norm": 1.1977710735486093,
"learning_rate": 1.881488376679912e-05,
"loss": 1.6682,
"step": 571
},
{
"epoch": 0.16536571263370917,
"grad_norm": 1.2133712471676765,
"learning_rate": 1.8810547469347737e-05,
"loss": 1.701,
"step": 572
},
{
"epoch": 0.16565481352992195,
"grad_norm": 1.1615194296116027,
"learning_rate": 1.880620375477989e-05,
"loss": 1.6957,
"step": 573
},
{
"epoch": 0.1659439144261347,
"grad_norm": 1.2434508263741582,
"learning_rate": 1.880185262675231e-05,
"loss": 1.6334,
"step": 574
},
{
"epoch": 0.1662330153223475,
"grad_norm": 1.1996625528866651,
"learning_rate": 1.8797494088927984e-05,
"loss": 1.7306,
"step": 575
},
{
"epoch": 0.16652211621856028,
"grad_norm": 1.25369862209721,
"learning_rate": 1.8793128144976114e-05,
"loss": 1.6611,
"step": 576
},
{
"epoch": 0.16681121711477306,
"grad_norm": 1.2478485036785194,
"learning_rate": 1.878875479857216e-05,
"loss": 1.6418,
"step": 577
},
{
"epoch": 0.16710031801098582,
"grad_norm": 1.3329254058696818,
"learning_rate": 1.8784374053397792e-05,
"loss": 1.6929,
"step": 578
},
{
"epoch": 0.1673894189071986,
"grad_norm": 1.3690716864028842,
"learning_rate": 1.8779985913140927e-05,
"loss": 1.6979,
"step": 579
},
{
"epoch": 0.1676785198034114,
"grad_norm": 1.2637256897353144,
"learning_rate": 1.877559038149569e-05,
"loss": 1.6229,
"step": 580
},
{
"epoch": 0.16796762069962418,
"grad_norm": 1.3196301095363738,
"learning_rate": 1.8771187462162452e-05,
"loss": 1.6806,
"step": 581
},
{
"epoch": 0.16825672159583693,
"grad_norm": 1.2751675322567242,
"learning_rate": 1.8766777158847777e-05,
"loss": 1.7331,
"step": 582
},
{
"epoch": 0.16854582249204972,
"grad_norm": 1.2217555466284777,
"learning_rate": 1.8762359475264464e-05,
"loss": 1.6439,
"step": 583
},
{
"epoch": 0.1688349233882625,
"grad_norm": 1.2138119129265534,
"learning_rate": 1.8757934415131513e-05,
"loss": 1.6229,
"step": 584
},
{
"epoch": 0.1691240242844753,
"grad_norm": 1.2481504467799762,
"learning_rate": 1.8753501982174142e-05,
"loss": 1.6527,
"step": 585
},
{
"epoch": 0.16941312518068807,
"grad_norm": 1.1551738364013855,
"learning_rate": 1.8749062180123778e-05,
"loss": 1.6202,
"step": 586
},
{
"epoch": 0.16970222607690083,
"grad_norm": 1.221158314927715,
"learning_rate": 1.8744615012718038e-05,
"loss": 1.674,
"step": 587
},
{
"epoch": 0.16999132697311362,
"grad_norm": 8.33538007172163,
"learning_rate": 1.8740160483700754e-05,
"loss": 1.7745,
"step": 588
},
{
"epoch": 0.1702804278693264,
"grad_norm": 1.2594866391024326,
"learning_rate": 1.8735698596821948e-05,
"loss": 1.7114,
"step": 589
},
{
"epoch": 0.17056952876553919,
"grad_norm": 1.1762832459510764,
"learning_rate": 1.8731229355837834e-05,
"loss": 1.6011,
"step": 590
},
{
"epoch": 0.17085862966175194,
"grad_norm": 1.2410876448625578,
"learning_rate": 1.8726752764510827e-05,
"loss": 1.5289,
"step": 591
},
{
"epoch": 0.17114773055796473,
"grad_norm": 1.2509478918932246,
"learning_rate": 1.8722268826609524e-05,
"loss": 1.6921,
"step": 592
},
{
"epoch": 0.1714368314541775,
"grad_norm": 1.2876270619453627,
"learning_rate": 1.87177775459087e-05,
"loss": 1.6632,
"step": 593
},
{
"epoch": 0.1717259323503903,
"grad_norm": 1.236444222374534,
"learning_rate": 1.8713278926189324e-05,
"loss": 1.628,
"step": 594
},
{
"epoch": 0.17201503324660306,
"grad_norm": 1.3325085992804107,
"learning_rate": 1.8708772971238528e-05,
"loss": 1.7139,
"step": 595
},
{
"epoch": 0.17230413414281584,
"grad_norm": 1.3054803605234377,
"learning_rate": 1.8704259684849636e-05,
"loss": 1.6715,
"step": 596
},
{
"epoch": 0.17259323503902863,
"grad_norm": 1.2114622828234745,
"learning_rate": 1.869973907082214e-05,
"loss": 1.6289,
"step": 597
},
{
"epoch": 0.1728823359352414,
"grad_norm": 1.3255943035899682,
"learning_rate": 1.8695211132961686e-05,
"loss": 1.5875,
"step": 598
},
{
"epoch": 0.17317143683145417,
"grad_norm": 1.2436093130787584,
"learning_rate": 1.86906758750801e-05,
"loss": 1.6891,
"step": 599
},
{
"epoch": 0.17346053772766695,
"grad_norm": 1.1789555197737465,
"learning_rate": 1.8686133300995374e-05,
"loss": 1.7097,
"step": 600
},
{
"epoch": 0.17374963862387974,
"grad_norm": 1.3205013238390153,
"learning_rate": 1.8681583414531644e-05,
"loss": 1.6392,
"step": 601
},
{
"epoch": 0.17403873952009252,
"grad_norm": 1.1648007546672043,
"learning_rate": 1.8677026219519212e-05,
"loss": 1.675,
"step": 602
},
{
"epoch": 0.17432784041630528,
"grad_norm": 1.2633263020507286,
"learning_rate": 1.8672461719794532e-05,
"loss": 1.5957,
"step": 603
},
{
"epoch": 0.17461694131251806,
"grad_norm": 1.2570430438589553,
"learning_rate": 1.8667889919200203e-05,
"loss": 1.7026,
"step": 604
},
{
"epoch": 0.17490604220873085,
"grad_norm": 1.203370562367749,
"learning_rate": 1.8663310821584976e-05,
"loss": 1.7054,
"step": 605
},
{
"epoch": 0.17519514310494363,
"grad_norm": 1.2387868298918792,
"learning_rate": 1.865872443080374e-05,
"loss": 1.7107,
"step": 606
},
{
"epoch": 0.1754842440011564,
"grad_norm": 1.197249289822212,
"learning_rate": 1.8654130750717526e-05,
"loss": 1.5961,
"step": 607
},
{
"epoch": 0.17577334489736918,
"grad_norm": 1.1922137775006185,
"learning_rate": 1.8649529785193502e-05,
"loss": 1.6349,
"step": 608
},
{
"epoch": 0.17606244579358196,
"grad_norm": 1.238013689830228,
"learning_rate": 1.8644921538104967e-05,
"loss": 1.6487,
"step": 609
},
{
"epoch": 0.17635154668979475,
"grad_norm": 1.198711220073165,
"learning_rate": 1.864030601333135e-05,
"loss": 1.6152,
"step": 610
},
{
"epoch": 0.1766406475860075,
"grad_norm": 1.1608845614417607,
"learning_rate": 1.8635683214758213e-05,
"loss": 1.6114,
"step": 611
},
{
"epoch": 0.1769297484822203,
"grad_norm": 1.223527743047107,
"learning_rate": 1.8631053146277233e-05,
"loss": 1.5324,
"step": 612
},
{
"epoch": 0.17721884937843307,
"grad_norm": 1.2244849270886753,
"learning_rate": 1.8626415811786215e-05,
"loss": 1.7019,
"step": 613
},
{
"epoch": 0.17750795027464586,
"grad_norm": 1.2422110644907864,
"learning_rate": 1.8621771215189065e-05,
"loss": 1.6713,
"step": 614
},
{
"epoch": 0.17779705117085862,
"grad_norm": 1.210470089961754,
"learning_rate": 1.861711936039583e-05,
"loss": 1.6569,
"step": 615
},
{
"epoch": 0.1780861520670714,
"grad_norm": 1.207776683125208,
"learning_rate": 1.8612460251322637e-05,
"loss": 1.5186,
"step": 616
},
{
"epoch": 0.1783752529632842,
"grad_norm": 1.2462777938743828,
"learning_rate": 1.8607793891891744e-05,
"loss": 1.7027,
"step": 617
},
{
"epoch": 0.17866435385949697,
"grad_norm": 1.2187274782133524,
"learning_rate": 1.8603120286031496e-05,
"loss": 1.7697,
"step": 618
},
{
"epoch": 0.17895345475570976,
"grad_norm": 1.2542678538432044,
"learning_rate": 1.859843943767635e-05,
"loss": 1.7017,
"step": 619
},
{
"epoch": 0.1792425556519225,
"grad_norm": 1.1778318765224127,
"learning_rate": 1.8593751350766854e-05,
"loss": 1.6475,
"step": 620
},
{
"epoch": 0.1795316565481353,
"grad_norm": 1.2711298413207304,
"learning_rate": 1.858905602924965e-05,
"loss": 1.6394,
"step": 621
},
{
"epoch": 0.17982075744434808,
"grad_norm": 1.232207257276827,
"learning_rate": 1.8584353477077476e-05,
"loss": 1.6074,
"step": 622
},
{
"epoch": 0.18010985834056087,
"grad_norm": 1.2540297946919436,
"learning_rate": 1.857964369820915e-05,
"loss": 1.6683,
"step": 623
},
{
"epoch": 0.18039895923677363,
"grad_norm": 1.2340677553615835,
"learning_rate": 1.857492669660957e-05,
"loss": 1.6716,
"step": 624
},
{
"epoch": 0.1806880601329864,
"grad_norm": 1.2065806455145902,
"learning_rate": 1.857020247624973e-05,
"loss": 1.5365,
"step": 625
},
{
"epoch": 0.1809771610291992,
"grad_norm": 1.2263400735274184,
"learning_rate": 1.856547104110669e-05,
"loss": 1.5683,
"step": 626
},
{
"epoch": 0.18126626192541198,
"grad_norm": 1.347386958745164,
"learning_rate": 1.8560732395163585e-05,
"loss": 1.7436,
"step": 627
},
{
"epoch": 0.18155536282162474,
"grad_norm": 1.3051300929582412,
"learning_rate": 1.8555986542409615e-05,
"loss": 1.7476,
"step": 628
},
{
"epoch": 0.18184446371783752,
"grad_norm": 1.3479071566939564,
"learning_rate": 1.855123348684006e-05,
"loss": 1.6623,
"step": 629
},
{
"epoch": 0.1821335646140503,
"grad_norm": 1.2568804583760171,
"learning_rate": 1.8546473232456255e-05,
"loss": 1.6444,
"step": 630
},
{
"epoch": 0.1824226655102631,
"grad_norm": 1.2426342867282067,
"learning_rate": 1.85417057832656e-05,
"loss": 1.5964,
"step": 631
},
{
"epoch": 0.18271176640647585,
"grad_norm": 1.2638177775731838,
"learning_rate": 1.853693114328154e-05,
"loss": 1.5322,
"step": 632
},
{
"epoch": 0.18300086730268864,
"grad_norm": 1.164069058129512,
"learning_rate": 1.8532149316523587e-05,
"loss": 1.5849,
"step": 633
},
{
"epoch": 0.18328996819890142,
"grad_norm": 1.297248418393498,
"learning_rate": 1.8527360307017306e-05,
"loss": 1.7388,
"step": 634
},
{
"epoch": 0.1835790690951142,
"grad_norm": 1.1341179063142641,
"learning_rate": 1.852256411879429e-05,
"loss": 1.7354,
"step": 635
},
{
"epoch": 0.18386816999132696,
"grad_norm": 1.215237098350405,
"learning_rate": 1.8517760755892197e-05,
"loss": 1.6552,
"step": 636
},
{
"epoch": 0.18415727088753975,
"grad_norm": 1.1982757446916235,
"learning_rate": 1.8512950222354704e-05,
"loss": 1.6313,
"step": 637
},
{
"epoch": 0.18444637178375253,
"grad_norm": 1.2063061953012788,
"learning_rate": 1.850813252223154e-05,
"loss": 1.6108,
"step": 638
},
{
"epoch": 0.18473547267996532,
"grad_norm": 1.2120232512101812,
"learning_rate": 1.8503307659578466e-05,
"loss": 1.6357,
"step": 639
},
{
"epoch": 0.18502457357617808,
"grad_norm": 1.2479587047666476,
"learning_rate": 1.8498475638457265e-05,
"loss": 1.6444,
"step": 640
},
{
"epoch": 0.18531367447239086,
"grad_norm": 1.2758952934748575,
"learning_rate": 1.849363646293575e-05,
"loss": 1.632,
"step": 641
},
{
"epoch": 0.18560277536860365,
"grad_norm": 1.1490779391882489,
"learning_rate": 1.8488790137087764e-05,
"loss": 1.6316,
"step": 642
},
{
"epoch": 0.18589187626481643,
"grad_norm": 1.195110289142274,
"learning_rate": 1.8483936664993152e-05,
"loss": 1.6018,
"step": 643
},
{
"epoch": 0.1861809771610292,
"grad_norm": 1.283174200034784,
"learning_rate": 1.847907605073779e-05,
"loss": 1.6488,
"step": 644
},
{
"epoch": 0.18647007805724197,
"grad_norm": 1.2662030686633299,
"learning_rate": 1.847420829841357e-05,
"loss": 1.6538,
"step": 645
},
{
"epoch": 0.18675917895345476,
"grad_norm": 1.2237194592018654,
"learning_rate": 1.8469333412118375e-05,
"loss": 1.6171,
"step": 646
},
{
"epoch": 0.18704827984966754,
"grad_norm": 1.155988769874379,
"learning_rate": 1.846445139595611e-05,
"loss": 1.5969,
"step": 647
},
{
"epoch": 0.1873373807458803,
"grad_norm": 1.2416911437703613,
"learning_rate": 1.845956225403667e-05,
"loss": 1.6744,
"step": 648
},
{
"epoch": 0.18762648164209308,
"grad_norm": 1.1875358817766466,
"learning_rate": 1.8454665990475967e-05,
"loss": 1.5515,
"step": 649
},
{
"epoch": 0.18791558253830587,
"grad_norm": 1.1818241708803934,
"learning_rate": 1.844976260939589e-05,
"loss": 1.5487,
"step": 650
},
{
"epoch": 0.18820468343451865,
"grad_norm": 1.1840647734438647,
"learning_rate": 1.8444852114924325e-05,
"loss": 1.6148,
"step": 651
},
{
"epoch": 0.1884937843307314,
"grad_norm": 1.2706827502060694,
"learning_rate": 1.8439934511195147e-05,
"loss": 1.6112,
"step": 652
},
{
"epoch": 0.1887828852269442,
"grad_norm": 1.220131649175792,
"learning_rate": 1.843500980234822e-05,
"loss": 1.6449,
"step": 653
},
{
"epoch": 0.18907198612315698,
"grad_norm": 1.103922527658955,
"learning_rate": 1.8430077992529388e-05,
"loss": 1.6444,
"step": 654
},
{
"epoch": 0.18936108701936977,
"grad_norm": 1.1899809619524633,
"learning_rate": 1.8425139085890466e-05,
"loss": 1.5967,
"step": 655
},
{
"epoch": 0.18965018791558255,
"grad_norm": 1.2244806367070153,
"learning_rate": 1.8420193086589252e-05,
"loss": 1.6504,
"step": 656
},
{
"epoch": 0.1899392888117953,
"grad_norm": 1.1912067775204556,
"learning_rate": 1.841523999878951e-05,
"loss": 1.718,
"step": 657
},
{
"epoch": 0.1902283897080081,
"grad_norm": 1.2117478146838403,
"learning_rate": 1.8410279826660976e-05,
"loss": 1.5804,
"step": 658
},
{
"epoch": 0.19051749060422088,
"grad_norm": 1.1341524351911334,
"learning_rate": 1.840531257437934e-05,
"loss": 1.6287,
"step": 659
},
{
"epoch": 0.19080659150043366,
"grad_norm": 1.1914356537636899,
"learning_rate": 1.8400338246126265e-05,
"loss": 1.6563,
"step": 660
},
{
"epoch": 0.19109569239664642,
"grad_norm": 1.4105624776944174,
"learning_rate": 1.839535684608936e-05,
"loss": 1.7139,
"step": 661
},
{
"epoch": 0.1913847932928592,
"grad_norm": 1.1645723203046823,
"learning_rate": 1.8390368378462197e-05,
"loss": 1.7166,
"step": 662
},
{
"epoch": 0.191673894189072,
"grad_norm": 1.2936969076152187,
"learning_rate": 1.8385372847444296e-05,
"loss": 1.6482,
"step": 663
},
{
"epoch": 0.19196299508528478,
"grad_norm": 1.1569033962294648,
"learning_rate": 1.838037025724111e-05,
"loss": 1.6843,
"step": 664
},
{
"epoch": 0.19225209598149753,
"grad_norm": 1.1405851915704683,
"learning_rate": 1.837536061206405e-05,
"loss": 1.6033,
"step": 665
},
{
"epoch": 0.19254119687771032,
"grad_norm": 1.2542382381366322,
"learning_rate": 1.8370343916130467e-05,
"loss": 1.6706,
"step": 666
},
{
"epoch": 0.1928302977739231,
"grad_norm": 1.2159546224332614,
"learning_rate": 1.8365320173663638e-05,
"loss": 1.6918,
"step": 667
},
{
"epoch": 0.1931193986701359,
"grad_norm": 1.2532817994557603,
"learning_rate": 1.8360289388892773e-05,
"loss": 1.6683,
"step": 668
},
{
"epoch": 0.19340849956634865,
"grad_norm": 1.2395321203333016,
"learning_rate": 1.8355251566053016e-05,
"loss": 1.629,
"step": 669
},
{
"epoch": 0.19369760046256143,
"grad_norm": 1.1689572668191017,
"learning_rate": 1.8350206709385437e-05,
"loss": 1.6462,
"step": 670
},
{
"epoch": 0.19398670135877422,
"grad_norm": 1.1981131507977363,
"learning_rate": 1.8345154823137015e-05,
"loss": 1.5874,
"step": 671
},
{
"epoch": 0.194275802254987,
"grad_norm": 1.1854854067275211,
"learning_rate": 1.834009591156067e-05,
"loss": 1.5968,
"step": 672
},
{
"epoch": 0.19456490315119976,
"grad_norm": 1.1769715429764516,
"learning_rate": 1.8335029978915207e-05,
"loss": 1.5856,
"step": 673
},
{
"epoch": 0.19485400404741254,
"grad_norm": 1.1759193873525724,
"learning_rate": 1.8329957029465367e-05,
"loss": 1.676,
"step": 674
},
{
"epoch": 0.19514310494362533,
"grad_norm": 1.2034916407402159,
"learning_rate": 1.8324877067481782e-05,
"loss": 1.6385,
"step": 675
},
{
"epoch": 0.1954322058398381,
"grad_norm": 1.1498736100928149,
"learning_rate": 1.8319790097240998e-05,
"loss": 1.5622,
"step": 676
},
{
"epoch": 0.19572130673605087,
"grad_norm": 1.1833854630281373,
"learning_rate": 1.8314696123025456e-05,
"loss": 1.5906,
"step": 677
},
{
"epoch": 0.19601040763226366,
"grad_norm": 1.3064848599083938,
"learning_rate": 1.8309595149123486e-05,
"loss": 1.5784,
"step": 678
},
{
"epoch": 0.19629950852847644,
"grad_norm": 1.207598354497605,
"learning_rate": 1.8304487179829325e-05,
"loss": 1.6724,
"step": 679
},
{
"epoch": 0.19658860942468923,
"grad_norm": 1.3657596862796557,
"learning_rate": 1.829937221944309e-05,
"loss": 1.6808,
"step": 680
},
{
"epoch": 0.19687771032090198,
"grad_norm": 1.2300109610049414,
"learning_rate": 1.8294250272270787e-05,
"loss": 1.701,
"step": 681
},
{
"epoch": 0.19716681121711477,
"grad_norm": 1.1932414325288254,
"learning_rate": 1.82891213426243e-05,
"loss": 1.6844,
"step": 682
},
{
"epoch": 0.19745591211332755,
"grad_norm": 1.2040899317538816,
"learning_rate": 1.8283985434821394e-05,
"loss": 1.6674,
"step": 683
},
{
"epoch": 0.19774501300954034,
"grad_norm": 1.20051829187703,
"learning_rate": 1.827884255318571e-05,
"loss": 1.6988,
"step": 684
},
{
"epoch": 0.1980341139057531,
"grad_norm": 1.1808862977245562,
"learning_rate": 1.8273692702046754e-05,
"loss": 1.5823,
"step": 685
},
{
"epoch": 0.19832321480196588,
"grad_norm": 1.226726509340714,
"learning_rate": 1.8268535885739905e-05,
"loss": 1.6371,
"step": 686
},
{
"epoch": 0.19861231569817867,
"grad_norm": 1.254783885869565,
"learning_rate": 1.8263372108606404e-05,
"loss": 1.5849,
"step": 687
},
{
"epoch": 0.19890141659439145,
"grad_norm": 1.2210257028134839,
"learning_rate": 1.8258201374993355e-05,
"loss": 1.7316,
"step": 688
},
{
"epoch": 0.1991905174906042,
"grad_norm": 1.27099587011664,
"learning_rate": 1.8253023689253707e-05,
"loss": 1.592,
"step": 689
},
{
"epoch": 0.199479618386817,
"grad_norm": 1.366689582202721,
"learning_rate": 1.8247839055746276e-05,
"loss": 1.5789,
"step": 690
},
{
"epoch": 0.19976871928302978,
"grad_norm": 1.1755442721898772,
"learning_rate": 1.8242647478835717e-05,
"loss": 1.6069,
"step": 691
},
{
"epoch": 0.20005782017924256,
"grad_norm": 1.2600721970141062,
"learning_rate": 1.8237448962892537e-05,
"loss": 1.7384,
"step": 692
},
{
"epoch": 0.20034692107545535,
"grad_norm": 1.22621954725729,
"learning_rate": 1.823224351229308e-05,
"loss": 1.6226,
"step": 693
},
{
"epoch": 0.2006360219716681,
"grad_norm": 1.2182097639154552,
"learning_rate": 1.822703113141953e-05,
"loss": 1.613,
"step": 694
},
{
"epoch": 0.2009251228678809,
"grad_norm": 1.2785953541313604,
"learning_rate": 1.82218118246599e-05,
"loss": 1.7014,
"step": 695
},
{
"epoch": 0.20121422376409367,
"grad_norm": 1.2338324878833378,
"learning_rate": 1.821658559640804e-05,
"loss": 1.6298,
"step": 696
},
{
"epoch": 0.20150332466030646,
"grad_norm": 1.1768453406356614,
"learning_rate": 1.821135245106363e-05,
"loss": 1.5619,
"step": 697
},
{
"epoch": 0.20179242555651922,
"grad_norm": 1.1771764186726315,
"learning_rate": 1.820611239303216e-05,
"loss": 1.68,
"step": 698
},
{
"epoch": 0.202081526452732,
"grad_norm": 1.1751259370580003,
"learning_rate": 1.8200865426724955e-05,
"loss": 1.6353,
"step": 699
},
{
"epoch": 0.2023706273489448,
"grad_norm": 1.234717870608629,
"learning_rate": 1.819561155655914e-05,
"loss": 1.6222,
"step": 700
},
{
"epoch": 0.20265972824515757,
"grad_norm": 1.3036974701284907,
"learning_rate": 1.819035078695767e-05,
"loss": 1.6272,
"step": 701
},
{
"epoch": 0.20294882914137033,
"grad_norm": 1.201996899703672,
"learning_rate": 1.8185083122349288e-05,
"loss": 1.6977,
"step": 702
},
{
"epoch": 0.20323793003758311,
"grad_norm": 1.2513029094302637,
"learning_rate": 1.8179808567168556e-05,
"loss": 1.7145,
"step": 703
},
{
"epoch": 0.2035270309337959,
"grad_norm": 1.2829160939047408,
"learning_rate": 1.8174527125855836e-05,
"loss": 1.7475,
"step": 704
},
{
"epoch": 0.20381613183000868,
"grad_norm": 1.2634189347114115,
"learning_rate": 1.8169238802857275e-05,
"loss": 1.7567,
"step": 705
},
{
"epoch": 0.20410523272622144,
"grad_norm": 1.1500402066275863,
"learning_rate": 1.8163943602624834e-05,
"loss": 1.568,
"step": 706
},
{
"epoch": 0.20439433362243423,
"grad_norm": 1.2208940822026542,
"learning_rate": 1.815864152961624e-05,
"loss": 1.6101,
"step": 707
},
{
"epoch": 0.204683434518647,
"grad_norm": 1.2920340236775505,
"learning_rate": 1.8153332588295023e-05,
"loss": 1.6039,
"step": 708
},
{
"epoch": 0.2049725354148598,
"grad_norm": 1.3082533794058193,
"learning_rate": 1.814801678313049e-05,
"loss": 1.66,
"step": 709
},
{
"epoch": 0.20526163631107255,
"grad_norm": 1.2031595547172316,
"learning_rate": 1.8142694118597727e-05,
"loss": 1.6835,
"step": 710
},
{
"epoch": 0.20555073720728534,
"grad_norm": 1.1799641826600815,
"learning_rate": 1.8137364599177587e-05,
"loss": 1.6254,
"step": 711
},
{
"epoch": 0.20583983810349812,
"grad_norm": 1.208426243281953,
"learning_rate": 1.8132028229356703e-05,
"loss": 1.6405,
"step": 712
},
{
"epoch": 0.2061289389997109,
"grad_norm": 1.1403284646294907,
"learning_rate": 1.8126685013627475e-05,
"loss": 1.7065,
"step": 713
},
{
"epoch": 0.20641803989592367,
"grad_norm": 1.2249537048855292,
"learning_rate": 1.8121334956488058e-05,
"loss": 1.6447,
"step": 714
},
{
"epoch": 0.20670714079213645,
"grad_norm": 1.189614154915905,
"learning_rate": 1.8115978062442375e-05,
"loss": 1.5539,
"step": 715
},
{
"epoch": 0.20699624168834924,
"grad_norm": 1.1643490997811285,
"learning_rate": 1.81106143360001e-05,
"loss": 1.6068,
"step": 716
},
{
"epoch": 0.20728534258456202,
"grad_norm": 1.1646757540028276,
"learning_rate": 1.8105243781676663e-05,
"loss": 1.5891,
"step": 717
},
{
"epoch": 0.20757444348077478,
"grad_norm": 1.2441086225997955,
"learning_rate": 1.809986640399323e-05,
"loss": 1.529,
"step": 718
},
{
"epoch": 0.20786354437698756,
"grad_norm": 1.3159032556741779,
"learning_rate": 1.809448220747673e-05,
"loss": 1.5943,
"step": 719
},
{
"epoch": 0.20815264527320035,
"grad_norm": 1.1977613211999474,
"learning_rate": 1.8089091196659822e-05,
"loss": 1.6186,
"step": 720
},
{
"epoch": 0.20844174616941313,
"grad_norm": 1.1611286191966523,
"learning_rate": 1.8083693376080896e-05,
"loss": 1.609,
"step": 721
},
{
"epoch": 0.2087308470656259,
"grad_norm": 1.2479677009723977,
"learning_rate": 1.807828875028408e-05,
"loss": 1.6632,
"step": 722
},
{
"epoch": 0.20901994796183868,
"grad_norm": 1.2198060446129206,
"learning_rate": 1.8072877323819247e-05,
"loss": 1.6938,
"step": 723
},
{
"epoch": 0.20930904885805146,
"grad_norm": 1.1959812561872514,
"learning_rate": 1.806745910124196e-05,
"loss": 1.6715,
"step": 724
},
{
"epoch": 0.20959814975426425,
"grad_norm": 1.1644575837628937,
"learning_rate": 1.8062034087113538e-05,
"loss": 1.654,
"step": 725
},
{
"epoch": 0.209887250650477,
"grad_norm": 1.1952595776641282,
"learning_rate": 1.8056602286000995e-05,
"loss": 1.7361,
"step": 726
},
{
"epoch": 0.2101763515466898,
"grad_norm": 1.1187248069836293,
"learning_rate": 1.8051163702477067e-05,
"loss": 1.4882,
"step": 727
},
{
"epoch": 0.21046545244290257,
"grad_norm": 1.23860503080743,
"learning_rate": 1.80457183411202e-05,
"loss": 1.6494,
"step": 728
},
{
"epoch": 0.21075455333911536,
"grad_norm": 1.1198629149288368,
"learning_rate": 1.8040266206514548e-05,
"loss": 1.6625,
"step": 729
},
{
"epoch": 0.21104365423532814,
"grad_norm": 1.1312334064032668,
"learning_rate": 1.8034807303249956e-05,
"loss": 1.644,
"step": 730
},
{
"epoch": 0.2113327551315409,
"grad_norm": 1.178023621775733,
"learning_rate": 1.8029341635921985e-05,
"loss": 1.6237,
"step": 731
},
{
"epoch": 0.21162185602775369,
"grad_norm": 1.1831551646009177,
"learning_rate": 1.8023869209131876e-05,
"loss": 1.684,
"step": 732
},
{
"epoch": 0.21191095692396647,
"grad_norm": 1.1269798393522688,
"learning_rate": 1.8018390027486564e-05,
"loss": 1.6614,
"step": 733
},
{
"epoch": 0.21220005782017926,
"grad_norm": 1.163617004294811,
"learning_rate": 1.8012904095598672e-05,
"loss": 1.5863,
"step": 734
},
{
"epoch": 0.212489158716392,
"grad_norm": 1.1666239101175062,
"learning_rate": 1.8007411418086504e-05,
"loss": 1.6286,
"step": 735
},
{
"epoch": 0.2127782596126048,
"grad_norm": 1.1783363837507064,
"learning_rate": 1.8001911999574048e-05,
"loss": 1.6024,
"step": 736
},
{
"epoch": 0.21306736050881758,
"grad_norm": 1.1324429879608466,
"learning_rate": 1.7996405844690955e-05,
"loss": 1.6086,
"step": 737
},
{
"epoch": 0.21335646140503037,
"grad_norm": 1.1857513831899675,
"learning_rate": 1.7990892958072562e-05,
"loss": 1.6637,
"step": 738
},
{
"epoch": 0.21364556230124312,
"grad_norm": 1.20546977750714,
"learning_rate": 1.798537334435986e-05,
"loss": 1.651,
"step": 739
},
{
"epoch": 0.2139346631974559,
"grad_norm": 1.214143713272185,
"learning_rate": 1.7979847008199515e-05,
"loss": 1.5663,
"step": 740
},
{
"epoch": 0.2142237640936687,
"grad_norm": 1.2482881773540793,
"learning_rate": 1.7974313954243846e-05,
"loss": 1.6634,
"step": 741
},
{
"epoch": 0.21451286498988148,
"grad_norm": 1.3788124938610393,
"learning_rate": 1.796877418715082e-05,
"loss": 1.7858,
"step": 742
},
{
"epoch": 0.21480196588609424,
"grad_norm": 1.210426850026346,
"learning_rate": 1.796322771158407e-05,
"loss": 1.5901,
"step": 743
},
{
"epoch": 0.21509106678230702,
"grad_norm": 1.1818842755255796,
"learning_rate": 1.7957674532212862e-05,
"loss": 1.5516,
"step": 744
},
{
"epoch": 0.2153801676785198,
"grad_norm": 1.2331450769496057,
"learning_rate": 1.7952114653712123e-05,
"loss": 1.6224,
"step": 745
},
{
"epoch": 0.2156692685747326,
"grad_norm": 1.1415088042700048,
"learning_rate": 1.7946548080762402e-05,
"loss": 1.7262,
"step": 746
},
{
"epoch": 0.21595836947094535,
"grad_norm": 1.0990551729161935,
"learning_rate": 1.7940974818049898e-05,
"loss": 1.674,
"step": 747
},
{
"epoch": 0.21624747036715813,
"grad_norm": 1.217412085009134,
"learning_rate": 1.7935394870266425e-05,
"loss": 1.6138,
"step": 748
},
{
"epoch": 0.21653657126337092,
"grad_norm": 1.167638566385817,
"learning_rate": 1.7929808242109444e-05,
"loss": 1.6414,
"step": 749
},
{
"epoch": 0.2168256721595837,
"grad_norm": 1.2003093360263695,
"learning_rate": 1.7924214938282026e-05,
"loss": 1.6289,
"step": 750
},
{
"epoch": 0.21711477305579646,
"grad_norm": 1.2685172755141476,
"learning_rate": 1.7918614963492872e-05,
"loss": 1.7019,
"step": 751
},
{
"epoch": 0.21740387395200925,
"grad_norm": 1.173005539771057,
"learning_rate": 1.7913008322456283e-05,
"loss": 1.6812,
"step": 752
},
{
"epoch": 0.21769297484822203,
"grad_norm": 1.2159228008333007,
"learning_rate": 1.7907395019892196e-05,
"loss": 1.6077,
"step": 753
},
{
"epoch": 0.21798207574443482,
"grad_norm": 1.1742389210088622,
"learning_rate": 1.7901775060526132e-05,
"loss": 1.622,
"step": 754
},
{
"epoch": 0.21827117664064757,
"grad_norm": 1.2128889995971577,
"learning_rate": 1.789614844908923e-05,
"loss": 1.6743,
"step": 755
},
{
"epoch": 0.21856027753686036,
"grad_norm": 1.1667752847351143,
"learning_rate": 1.7890515190318222e-05,
"loss": 1.5754,
"step": 756
},
{
"epoch": 0.21884937843307314,
"grad_norm": 1.1295223300664317,
"learning_rate": 1.7884875288955448e-05,
"loss": 1.6738,
"step": 757
},
{
"epoch": 0.21913847932928593,
"grad_norm": 1.261068879100486,
"learning_rate": 1.7879228749748824e-05,
"loss": 1.6835,
"step": 758
},
{
"epoch": 0.2194275802254987,
"grad_norm": 1.1728226481913466,
"learning_rate": 1.7873575577451863e-05,
"loss": 1.6891,
"step": 759
},
{
"epoch": 0.21971668112171147,
"grad_norm": 1.1787350920697617,
"learning_rate": 1.7867915776823663e-05,
"loss": 1.6257,
"step": 760
},
{
"epoch": 0.22000578201792426,
"grad_norm": 1.1620635102842405,
"learning_rate": 1.7862249352628897e-05,
"loss": 1.6228,
"step": 761
},
{
"epoch": 0.22029488291413704,
"grad_norm": 1.174838883761586,
"learning_rate": 1.785657630963782e-05,
"loss": 1.6795,
"step": 762
},
{
"epoch": 0.2205839838103498,
"grad_norm": 1.2540807508404137,
"learning_rate": 1.785089665262625e-05,
"loss": 1.6921,
"step": 763
},
{
"epoch": 0.22087308470656258,
"grad_norm": 1.1328480065063387,
"learning_rate": 1.784521038637558e-05,
"loss": 1.6261,
"step": 764
},
{
"epoch": 0.22116218560277537,
"grad_norm": 1.1913711901780004,
"learning_rate": 1.783951751567277e-05,
"loss": 1.7268,
"step": 765
},
{
"epoch": 0.22145128649898815,
"grad_norm": 1.1168162011581586,
"learning_rate": 1.7833818045310335e-05,
"loss": 1.6462,
"step": 766
},
{
"epoch": 0.22174038739520094,
"grad_norm": 1.1597448679252735,
"learning_rate": 1.782811198008634e-05,
"loss": 1.6369,
"step": 767
},
{
"epoch": 0.2220294882914137,
"grad_norm": 1.1983400772535562,
"learning_rate": 1.782239932480441e-05,
"loss": 1.6555,
"step": 768
},
{
"epoch": 0.22231858918762648,
"grad_norm": 1.1771360817965915,
"learning_rate": 1.7816680084273724e-05,
"loss": 1.6722,
"step": 769
},
{
"epoch": 0.22260769008383927,
"grad_norm": 1.202362947433072,
"learning_rate": 1.7810954263308987e-05,
"loss": 1.6489,
"step": 770
},
{
"epoch": 0.22289679098005205,
"grad_norm": 1.1514930888062975,
"learning_rate": 1.780522186673046e-05,
"loss": 1.5065,
"step": 771
},
{
"epoch": 0.2231858918762648,
"grad_norm": 1.2543833283472365,
"learning_rate": 1.779948289936393e-05,
"loss": 1.6361,
"step": 772
},
{
"epoch": 0.2234749927724776,
"grad_norm": 1.1103075126565838,
"learning_rate": 1.779373736604072e-05,
"loss": 1.5877,
"step": 773
},
{
"epoch": 0.22376409366869038,
"grad_norm": 1.2145730232558314,
"learning_rate": 1.7787985271597678e-05,
"loss": 1.586,
"step": 774
},
{
"epoch": 0.22405319456490316,
"grad_norm": 1.2583750794569701,
"learning_rate": 1.7782226620877178e-05,
"loss": 1.6928,
"step": 775
},
{
"epoch": 0.22434229546111592,
"grad_norm": 1.192600045488377,
"learning_rate": 1.777646141872711e-05,
"loss": 1.6711,
"step": 776
},
{
"epoch": 0.2246313963573287,
"grad_norm": 1.2325135805846623,
"learning_rate": 1.7770689670000882e-05,
"loss": 1.6051,
"step": 777
},
{
"epoch": 0.2249204972535415,
"grad_norm": 1.2445267162996139,
"learning_rate": 1.7764911379557415e-05,
"loss": 1.6704,
"step": 778
},
{
"epoch": 0.22520959814975428,
"grad_norm": 1.164473435396523,
"learning_rate": 1.7759126552261133e-05,
"loss": 1.6788,
"step": 779
},
{
"epoch": 0.22549869904596703,
"grad_norm": 1.3267375597230986,
"learning_rate": 1.7753335192981963e-05,
"loss": 1.6578,
"step": 780
},
{
"epoch": 0.22578779994217982,
"grad_norm": 1.138551289696566,
"learning_rate": 1.7747537306595336e-05,
"loss": 1.6774,
"step": 781
},
{
"epoch": 0.2260769008383926,
"grad_norm": 1.2577334291491808,
"learning_rate": 1.774173289798217e-05,
"loss": 1.6842,
"step": 782
},
{
"epoch": 0.2263660017346054,
"grad_norm": 1.1496680909323838,
"learning_rate": 1.7735921972028883e-05,
"loss": 1.628,
"step": 783
},
{
"epoch": 0.22665510263081814,
"grad_norm": 1.179850018613425,
"learning_rate": 1.773010453362737e-05,
"loss": 1.6444,
"step": 784
},
{
"epoch": 0.22694420352703093,
"grad_norm": 1.2654130215050725,
"learning_rate": 1.7724280587675017e-05,
"loss": 1.4592,
"step": 785
},
{
"epoch": 0.22723330442324371,
"grad_norm": 1.1907665269492804,
"learning_rate": 1.7718450139074685e-05,
"loss": 1.58,
"step": 786
},
{
"epoch": 0.2275224053194565,
"grad_norm": 1.217021995188848,
"learning_rate": 1.7712613192734703e-05,
"loss": 1.575,
"step": 787
},
{
"epoch": 0.22781150621566926,
"grad_norm": 1.1808590593353434,
"learning_rate": 1.7706769753568882e-05,
"loss": 1.6172,
"step": 788
},
{
"epoch": 0.22810060711188204,
"grad_norm": 1.196849426701046,
"learning_rate": 1.770091982649649e-05,
"loss": 1.6436,
"step": 789
},
{
"epoch": 0.22838970800809483,
"grad_norm": 1.1412097565678294,
"learning_rate": 1.7695063416442263e-05,
"loss": 1.6423,
"step": 790
},
{
"epoch": 0.2286788089043076,
"grad_norm": 1.1352763403847086,
"learning_rate": 1.7689200528336384e-05,
"loss": 1.6991,
"step": 791
},
{
"epoch": 0.22896790980052037,
"grad_norm": 1.1832029724179747,
"learning_rate": 1.7683331167114503e-05,
"loss": 1.6004,
"step": 792
},
{
"epoch": 0.22925701069673315,
"grad_norm": 1.190404880041848,
"learning_rate": 1.767745533771771e-05,
"loss": 1.6232,
"step": 793
},
{
"epoch": 0.22954611159294594,
"grad_norm": 1.1425994526274166,
"learning_rate": 1.7671573045092546e-05,
"loss": 1.6601,
"step": 794
},
{
"epoch": 0.22983521248915872,
"grad_norm": 4.306902746171457,
"learning_rate": 1.766568429419099e-05,
"loss": 1.6119,
"step": 795
},
{
"epoch": 0.23012431338537148,
"grad_norm": 1.2116557080614034,
"learning_rate": 1.7659789089970457e-05,
"loss": 1.553,
"step": 796
},
{
"epoch": 0.23041341428158427,
"grad_norm": 1.133998906444825,
"learning_rate": 1.7653887437393792e-05,
"loss": 1.5345,
"step": 797
},
{
"epoch": 0.23070251517779705,
"grad_norm": 1.2512101232473924,
"learning_rate": 1.764797934142928e-05,
"loss": 1.6369,
"step": 798
},
{
"epoch": 0.23099161607400984,
"grad_norm": 1.1088774895792184,
"learning_rate": 1.7642064807050617e-05,
"loss": 1.6432,
"step": 799
},
{
"epoch": 0.2312807169702226,
"grad_norm": 1.2101553368194318,
"learning_rate": 1.7636143839236934e-05,
"loss": 1.6479,
"step": 800
},
{
"epoch": 0.23156981786643538,
"grad_norm": 1.191660567807356,
"learning_rate": 1.763021644297276e-05,
"loss": 1.5965,
"step": 801
},
{
"epoch": 0.23185891876264816,
"grad_norm": 1.143724831739925,
"learning_rate": 1.7624282623248043e-05,
"loss": 1.6417,
"step": 802
},
{
"epoch": 0.23214801965886095,
"grad_norm": 1.143027015736506,
"learning_rate": 1.7618342385058147e-05,
"loss": 1.6015,
"step": 803
},
{
"epoch": 0.23243712055507373,
"grad_norm": 1.1346799180515472,
"learning_rate": 1.761239573340383e-05,
"loss": 1.6608,
"step": 804
},
{
"epoch": 0.2327262214512865,
"grad_norm": 1.1117892300100156,
"learning_rate": 1.760644267329125e-05,
"loss": 1.6395,
"step": 805
},
{
"epoch": 0.23301532234749928,
"grad_norm": 1.180786293910612,
"learning_rate": 1.7600483209731963e-05,
"loss": 1.7766,
"step": 806
},
{
"epoch": 0.23330442324371206,
"grad_norm": 1.1174984255388902,
"learning_rate": 1.7594517347742918e-05,
"loss": 1.5313,
"step": 807
},
{
"epoch": 0.23359352413992485,
"grad_norm": 1.17896186676479,
"learning_rate": 1.7588545092346438e-05,
"loss": 1.7089,
"step": 808
},
{
"epoch": 0.2338826250361376,
"grad_norm": 1.149670644485711,
"learning_rate": 1.7582566448570245e-05,
"loss": 1.6145,
"step": 809
},
{
"epoch": 0.2341717259323504,
"grad_norm": 1.2037877986724042,
"learning_rate": 1.7576581421447428e-05,
"loss": 1.663,
"step": 810
},
{
"epoch": 0.23446082682856317,
"grad_norm": 1.1979774494369526,
"learning_rate": 1.7570590016016455e-05,
"loss": 1.6597,
"step": 811
},
{
"epoch": 0.23474992772477596,
"grad_norm": 1.198242854855127,
"learning_rate": 1.7564592237321162e-05,
"loss": 1.6047,
"step": 812
},
{
"epoch": 0.23503902862098872,
"grad_norm": 1.2006909754615687,
"learning_rate": 1.7558588090410747e-05,
"loss": 1.7242,
"step": 813
},
{
"epoch": 0.2353281295172015,
"grad_norm": 1.1396286336459736,
"learning_rate": 1.7552577580339768e-05,
"loss": 1.5758,
"step": 814
},
{
"epoch": 0.23561723041341429,
"grad_norm": 1.1676518798162923,
"learning_rate": 1.7546560712168154e-05,
"loss": 1.6103,
"step": 815
},
{
"epoch": 0.23590633130962707,
"grad_norm": 1.1417358311927566,
"learning_rate": 1.7540537490961168e-05,
"loss": 1.6361,
"step": 816
},
{
"epoch": 0.23619543220583983,
"grad_norm": 1.1842710928884372,
"learning_rate": 1.753450792178943e-05,
"loss": 1.6522,
"step": 817
},
{
"epoch": 0.2364845331020526,
"grad_norm": 1.2334219525738506,
"learning_rate": 1.752847200972891e-05,
"loss": 1.6686,
"step": 818
},
{
"epoch": 0.2367736339982654,
"grad_norm": 1.1595381193664076,
"learning_rate": 1.7522429759860903e-05,
"loss": 1.6584,
"step": 819
},
{
"epoch": 0.23706273489447818,
"grad_norm": 1.1998920618458444,
"learning_rate": 1.751638117727205e-05,
"loss": 1.5457,
"step": 820
},
{
"epoch": 0.23735183579069094,
"grad_norm": 1.2098085468599757,
"learning_rate": 1.7510326267054325e-05,
"loss": 1.611,
"step": 821
},
{
"epoch": 0.23764093668690373,
"grad_norm": 1.145177051100393,
"learning_rate": 1.7504265034305023e-05,
"loss": 1.5325,
"step": 822
},
{
"epoch": 0.2379300375831165,
"grad_norm": 1.1017627071573495,
"learning_rate": 1.7498197484126756e-05,
"loss": 1.5977,
"step": 823
},
{
"epoch": 0.2382191384793293,
"grad_norm": 1.2445774010012118,
"learning_rate": 1.749212362162747e-05,
"loss": 1.7187,
"step": 824
},
{
"epoch": 0.23850823937554205,
"grad_norm": 1.2507033146298203,
"learning_rate": 1.7486043451920415e-05,
"loss": 1.6508,
"step": 825
},
{
"epoch": 0.23879734027175484,
"grad_norm": 1.1841673566917597,
"learning_rate": 1.747995698012415e-05,
"loss": 1.6084,
"step": 826
},
{
"epoch": 0.23908644116796762,
"grad_norm": 1.1396731986156217,
"learning_rate": 1.7473864211362544e-05,
"loss": 1.6588,
"step": 827
},
{
"epoch": 0.2393755420641804,
"grad_norm": 1.204843676859692,
"learning_rate": 1.7467765150764763e-05,
"loss": 1.6455,
"step": 828
},
{
"epoch": 0.23966464296039316,
"grad_norm": 1.2736891694731587,
"learning_rate": 1.7461659803465273e-05,
"loss": 1.638,
"step": 829
},
{
"epoch": 0.23995374385660595,
"grad_norm": 1.2082297185144963,
"learning_rate": 1.7455548174603828e-05,
"loss": 1.6357,
"step": 830
},
{
"epoch": 0.24024284475281873,
"grad_norm": 1.1833363832715826,
"learning_rate": 1.7449430269325474e-05,
"loss": 1.6351,
"step": 831
},
{
"epoch": 0.24053194564903152,
"grad_norm": 1.1586148600596027,
"learning_rate": 1.7443306092780543e-05,
"loss": 1.6456,
"step": 832
},
{
"epoch": 0.24082104654524428,
"grad_norm": 1.1151118614534092,
"learning_rate": 1.743717565012464e-05,
"loss": 1.6143,
"step": 833
},
{
"epoch": 0.24111014744145706,
"grad_norm": 1.1764063110634089,
"learning_rate": 1.7431038946518648e-05,
"loss": 1.6603,
"step": 834
},
{
"epoch": 0.24139924833766985,
"grad_norm": 1.1417362079110664,
"learning_rate": 1.7424895987128723e-05,
"loss": 1.6167,
"step": 835
},
{
"epoch": 0.24168834923388263,
"grad_norm": 1.1257893632889384,
"learning_rate": 1.7418746777126284e-05,
"loss": 1.62,
"step": 836
},
{
"epoch": 0.2419774501300954,
"grad_norm": 1.3179549442602967,
"learning_rate": 1.741259132168801e-05,
"loss": 1.5686,
"step": 837
},
{
"epoch": 0.24226655102630817,
"grad_norm": 1.1244904370304307,
"learning_rate": 1.7406429625995853e-05,
"loss": 1.6366,
"step": 838
},
{
"epoch": 0.24255565192252096,
"grad_norm": 1.2065775280910245,
"learning_rate": 1.7400261695236996e-05,
"loss": 1.7549,
"step": 839
},
{
"epoch": 0.24284475281873374,
"grad_norm": 1.1396498478056365,
"learning_rate": 1.739408753460388e-05,
"loss": 1.552,
"step": 840
},
{
"epoch": 0.24313385371494653,
"grad_norm": 1.1240397902793318,
"learning_rate": 1.73879071492942e-05,
"loss": 1.6687,
"step": 841
},
{
"epoch": 0.2434229546111593,
"grad_norm": 1.179163925851606,
"learning_rate": 1.738172054451088e-05,
"loss": 1.6504,
"step": 842
},
{
"epoch": 0.24371205550737207,
"grad_norm": 1.1030610833520664,
"learning_rate": 1.737552772546208e-05,
"loss": 1.5912,
"step": 843
},
{
"epoch": 0.24400115640358486,
"grad_norm": 1.1838602443986301,
"learning_rate": 1.73693286973612e-05,
"loss": 1.5732,
"step": 844
},
{
"epoch": 0.24429025729979764,
"grad_norm": 1.143992740058496,
"learning_rate": 1.7363123465426853e-05,
"loss": 1.6071,
"step": 845
},
{
"epoch": 0.2445793581960104,
"grad_norm": 1.174590060660084,
"learning_rate": 1.7356912034882893e-05,
"loss": 1.5759,
"step": 846
},
{
"epoch": 0.24486845909222318,
"grad_norm": 1.1329692315930469,
"learning_rate": 1.735069441095838e-05,
"loss": 1.6088,
"step": 847
},
{
"epoch": 0.24515755998843597,
"grad_norm": 1.1350450062350554,
"learning_rate": 1.7344470598887585e-05,
"loss": 1.7202,
"step": 848
},
{
"epoch": 0.24544666088464875,
"grad_norm": 1.181601786302941,
"learning_rate": 1.733824060391e-05,
"loss": 1.7271,
"step": 849
},
{
"epoch": 0.2457357617808615,
"grad_norm": 1.1583792292333024,
"learning_rate": 1.7332004431270312e-05,
"loss": 1.5625,
"step": 850
},
{
"epoch": 0.2460248626770743,
"grad_norm": 1.1568878076104787,
"learning_rate": 1.7325762086218415e-05,
"loss": 1.6503,
"step": 851
},
{
"epoch": 0.24631396357328708,
"grad_norm": 1.2339640973176655,
"learning_rate": 1.7319513574009398e-05,
"loss": 1.7399,
"step": 852
},
{
"epoch": 0.24660306446949987,
"grad_norm": 1.2009335734006834,
"learning_rate": 1.731325889990354e-05,
"loss": 1.5677,
"step": 853
},
{
"epoch": 0.24689216536571262,
"grad_norm": 1.1688999648294327,
"learning_rate": 1.73069980691663e-05,
"loss": 1.6136,
"step": 854
},
{
"epoch": 0.2471812662619254,
"grad_norm": 1.1635714362599143,
"learning_rate": 1.730073108706834e-05,
"loss": 1.6131,
"step": 855
},
{
"epoch": 0.2474703671581382,
"grad_norm": 1.2123420765200588,
"learning_rate": 1.7294457958885482e-05,
"loss": 1.7155,
"step": 856
},
{
"epoch": 0.24775946805435098,
"grad_norm": 1.1434228753959954,
"learning_rate": 1.7288178689898725e-05,
"loss": 1.7325,
"step": 857
},
{
"epoch": 0.24804856895056374,
"grad_norm": 1.2246804177847264,
"learning_rate": 1.728189328539425e-05,
"loss": 1.6505,
"step": 858
},
{
"epoch": 0.24833766984677652,
"grad_norm": 1.1585117368059001,
"learning_rate": 1.7275601750663387e-05,
"loss": 1.5769,
"step": 859
},
{
"epoch": 0.2486267707429893,
"grad_norm": 1.3882387244074696,
"learning_rate": 1.7269304091002633e-05,
"loss": 1.6452,
"step": 860
},
{
"epoch": 0.2489158716392021,
"grad_norm": 1.160203748747366,
"learning_rate": 1.726300031171365e-05,
"loss": 1.5732,
"step": 861
},
{
"epoch": 0.24920497253541485,
"grad_norm": 1.157523734644445,
"learning_rate": 1.7256690418103238e-05,
"loss": 1.6401,
"step": 862
},
{
"epoch": 0.24949407343162763,
"grad_norm": 1.1890152342635811,
"learning_rate": 1.725037441548335e-05,
"loss": 1.6668,
"step": 863
},
{
"epoch": 0.24978317432784042,
"grad_norm": 1.2257536935692366,
"learning_rate": 1.7244052309171087e-05,
"loss": 1.6516,
"step": 864
},
{
"epoch": 0.2500722752240532,
"grad_norm": 1.2023469701707792,
"learning_rate": 1.7237724104488675e-05,
"loss": 1.5962,
"step": 865
},
{
"epoch": 0.250361376120266,
"grad_norm": 1.2280068128337949,
"learning_rate": 1.723138980676349e-05,
"loss": 1.6962,
"step": 866
},
{
"epoch": 0.25065047701647875,
"grad_norm": 1.217449764349956,
"learning_rate": 1.7225049421328024e-05,
"loss": 1.6377,
"step": 867
},
{
"epoch": 0.2509395779126915,
"grad_norm": 1.2841045771510446,
"learning_rate": 1.7218702953519904e-05,
"loss": 1.6582,
"step": 868
},
{
"epoch": 0.2512286788089043,
"grad_norm": 1.2601843986629024,
"learning_rate": 1.7212350408681872e-05,
"loss": 1.6102,
"step": 869
},
{
"epoch": 0.2515177797051171,
"grad_norm": 1.145255827066236,
"learning_rate": 1.7205991792161783e-05,
"loss": 1.6581,
"step": 870
},
{
"epoch": 0.2518068806013299,
"grad_norm": 1.2362667478605363,
"learning_rate": 1.7199627109312615e-05,
"loss": 1.6043,
"step": 871
},
{
"epoch": 0.25209598149754264,
"grad_norm": 1.2401716127785924,
"learning_rate": 1.7193256365492436e-05,
"loss": 1.632,
"step": 872
},
{
"epoch": 0.2523850823937554,
"grad_norm": 1.119783771494612,
"learning_rate": 1.7186879566064433e-05,
"loss": 1.5714,
"step": 873
},
{
"epoch": 0.2526741832899682,
"grad_norm": 1.103085959008315,
"learning_rate": 1.718049671639688e-05,
"loss": 1.6384,
"step": 874
},
{
"epoch": 0.25296328418618097,
"grad_norm": 1.133467746444829,
"learning_rate": 1.717410782186315e-05,
"loss": 1.6366,
"step": 875
},
{
"epoch": 0.2532523850823938,
"grad_norm": 1.175009710061539,
"learning_rate": 1.7167712887841705e-05,
"loss": 1.6428,
"step": 876
},
{
"epoch": 0.25354148597860654,
"grad_norm": 1.1563638245548544,
"learning_rate": 1.716131191971608e-05,
"loss": 1.5624,
"step": 877
},
{
"epoch": 0.2538305868748193,
"grad_norm": 1.116419831991319,
"learning_rate": 1.715490492287491e-05,
"loss": 1.5829,
"step": 878
},
{
"epoch": 0.2541196877710321,
"grad_norm": 1.1370821345076967,
"learning_rate": 1.7148491902711884e-05,
"loss": 1.6631,
"step": 879
},
{
"epoch": 0.25440878866724487,
"grad_norm": 1.1611842225988047,
"learning_rate": 1.7142072864625777e-05,
"loss": 1.5913,
"step": 880
},
{
"epoch": 0.2546978895634576,
"grad_norm": 1.1019030805813086,
"learning_rate": 1.7135647814020425e-05,
"loss": 1.6758,
"step": 881
},
{
"epoch": 0.25498699045967044,
"grad_norm": 1.04613261420445,
"learning_rate": 1.7129216756304723e-05,
"loss": 1.5948,
"step": 882
},
{
"epoch": 0.2552760913558832,
"grad_norm": 1.1936376971157356,
"learning_rate": 1.7122779696892628e-05,
"loss": 1.6552,
"step": 883
},
{
"epoch": 0.255565192252096,
"grad_norm": 1.1593207052719288,
"learning_rate": 1.7116336641203147e-05,
"loss": 1.6959,
"step": 884
},
{
"epoch": 0.25585429314830876,
"grad_norm": 1.1061563744857061,
"learning_rate": 1.7109887594660336e-05,
"loss": 1.6163,
"step": 885
},
{
"epoch": 0.2561433940445215,
"grad_norm": 1.134065834530409,
"learning_rate": 1.710343256269329e-05,
"loss": 1.6726,
"step": 886
},
{
"epoch": 0.25643249494073433,
"grad_norm": 1.1359039495462206,
"learning_rate": 1.7096971550736153e-05,
"loss": 1.5318,
"step": 887
},
{
"epoch": 0.2567215958369471,
"grad_norm": 1.3387153646158823,
"learning_rate": 1.7090504564228087e-05,
"loss": 1.6655,
"step": 888
},
{
"epoch": 0.25701069673315985,
"grad_norm": 1.093747361454369,
"learning_rate": 1.7084031608613297e-05,
"loss": 1.7196,
"step": 889
},
{
"epoch": 0.25729979762937266,
"grad_norm": 1.114071184877325,
"learning_rate": 1.7077552689341012e-05,
"loss": 1.5488,
"step": 890
},
{
"epoch": 0.2575888985255854,
"grad_norm": 1.1616291244384729,
"learning_rate": 1.7071067811865477e-05,
"loss": 1.5431,
"step": 891
},
{
"epoch": 0.25787799942179823,
"grad_norm": 1.1354100602957258,
"learning_rate": 1.7064576981645954e-05,
"loss": 1.6055,
"step": 892
},
{
"epoch": 0.258167100318011,
"grad_norm": 1.1522440785073058,
"learning_rate": 1.7058080204146718e-05,
"loss": 1.629,
"step": 893
},
{
"epoch": 0.25845620121422375,
"grad_norm": 1.1026615179112005,
"learning_rate": 1.7051577484837043e-05,
"loss": 1.5596,
"step": 894
},
{
"epoch": 0.25874530211043656,
"grad_norm": 1.1687730289765201,
"learning_rate": 1.7045068829191218e-05,
"loss": 1.6721,
"step": 895
},
{
"epoch": 0.2590344030066493,
"grad_norm": 1.1074104029137306,
"learning_rate": 1.7038554242688526e-05,
"loss": 1.6789,
"step": 896
},
{
"epoch": 0.2593235039028621,
"grad_norm": 1.1026942328183036,
"learning_rate": 1.7032033730813236e-05,
"loss": 1.558,
"step": 897
},
{
"epoch": 0.2596126047990749,
"grad_norm": 1.1646081156981725,
"learning_rate": 1.7025507299054605e-05,
"loss": 1.601,
"step": 898
},
{
"epoch": 0.25990170569528764,
"grad_norm": 1.1271463774298178,
"learning_rate": 1.7018974952906885e-05,
"loss": 1.6538,
"step": 899
},
{
"epoch": 0.26019080659150046,
"grad_norm": 1.1091390731718525,
"learning_rate": 1.7012436697869295e-05,
"loss": 1.5819,
"step": 900
},
{
"epoch": 0.2604799074877132,
"grad_norm": 1.173447822417614,
"learning_rate": 1.7005892539446035e-05,
"loss": 1.6451,
"step": 901
},
{
"epoch": 0.26076900838392597,
"grad_norm": 1.1350959293058749,
"learning_rate": 1.699934248314627e-05,
"loss": 1.6962,
"step": 902
},
{
"epoch": 0.2610581092801388,
"grad_norm": 1.1162197648231467,
"learning_rate": 1.699278653448414e-05,
"loss": 1.6747,
"step": 903
},
{
"epoch": 0.26134721017635154,
"grad_norm": 1.2139920201386476,
"learning_rate": 1.698622469897873e-05,
"loss": 1.7454,
"step": 904
},
{
"epoch": 0.2616363110725643,
"grad_norm": 1.0734616221718876,
"learning_rate": 1.6979656982154095e-05,
"loss": 1.6061,
"step": 905
},
{
"epoch": 0.2619254119687771,
"grad_norm": 1.1328541465259365,
"learning_rate": 1.6973083389539233e-05,
"loss": 1.585,
"step": 906
},
{
"epoch": 0.26221451286498987,
"grad_norm": 1.1568303460362723,
"learning_rate": 1.6966503926668094e-05,
"loss": 1.6825,
"step": 907
},
{
"epoch": 0.2625036137612027,
"grad_norm": 1.196990001143863,
"learning_rate": 1.6959918599079562e-05,
"loss": 1.6538,
"step": 908
},
{
"epoch": 0.26279271465741544,
"grad_norm": 1.1342440267026168,
"learning_rate": 1.6953327412317468e-05,
"loss": 1.6075,
"step": 909
},
{
"epoch": 0.2630818155536282,
"grad_norm": 1.1596789456175167,
"learning_rate": 1.6946730371930565e-05,
"loss": 1.6929,
"step": 910
},
{
"epoch": 0.263370916449841,
"grad_norm": 1.1373065436703054,
"learning_rate": 1.6940127483472543e-05,
"loss": 1.6481,
"step": 911
},
{
"epoch": 0.26366001734605377,
"grad_norm": 1.1610606493879128,
"learning_rate": 1.6933518752502013e-05,
"loss": 1.7379,
"step": 912
},
{
"epoch": 0.2639491182422666,
"grad_norm": 1.125687311211295,
"learning_rate": 1.69269041845825e-05,
"loss": 1.5849,
"step": 913
},
{
"epoch": 0.26423821913847934,
"grad_norm": 1.1489558599701768,
"learning_rate": 1.692028378528244e-05,
"loss": 1.7226,
"step": 914
},
{
"epoch": 0.2645273200346921,
"grad_norm": 1.188601400208746,
"learning_rate": 1.6913657560175197e-05,
"loss": 1.6326,
"step": 915
},
{
"epoch": 0.2648164209309049,
"grad_norm": 1.113219452373414,
"learning_rate": 1.6907025514839013e-05,
"loss": 1.6478,
"step": 916
},
{
"epoch": 0.26510552182711766,
"grad_norm": 1.1593190572859267,
"learning_rate": 1.6900387654857055e-05,
"loss": 1.6093,
"step": 917
},
{
"epoch": 0.2653946227233304,
"grad_norm": 1.2134229994183705,
"learning_rate": 1.6893743985817366e-05,
"loss": 1.6903,
"step": 918
},
{
"epoch": 0.26568372361954323,
"grad_norm": 1.1993364856075077,
"learning_rate": 1.688709451331289e-05,
"loss": 1.6092,
"step": 919
},
{
"epoch": 0.265972824515756,
"grad_norm": 1.0919639751411407,
"learning_rate": 1.688043924294145e-05,
"loss": 1.6078,
"step": 920
},
{
"epoch": 0.2662619254119688,
"grad_norm": 1.2013320201783475,
"learning_rate": 1.687377818030575e-05,
"loss": 1.6899,
"step": 921
},
{
"epoch": 0.26655102630818156,
"grad_norm": 1.1806015487593893,
"learning_rate": 1.6867111331013384e-05,
"loss": 1.6197,
"step": 922
},
{
"epoch": 0.2668401272043943,
"grad_norm": 1.1020975960157413,
"learning_rate": 1.6860438700676797e-05,
"loss": 1.5893,
"step": 923
},
{
"epoch": 0.26712922810060713,
"grad_norm": 1.094951519307646,
"learning_rate": 1.685376029491331e-05,
"loss": 1.5254,
"step": 924
},
{
"epoch": 0.2674183289968199,
"grad_norm": 1.1163275611243435,
"learning_rate": 1.684707611934511e-05,
"loss": 1.5477,
"step": 925
},
{
"epoch": 0.26770742989303264,
"grad_norm": 1.2307806882838923,
"learning_rate": 1.6840386179599242e-05,
"loss": 1.6667,
"step": 926
},
{
"epoch": 0.26799653078924546,
"grad_norm": 1.171373289157806,
"learning_rate": 1.683369048130759e-05,
"loss": 1.6172,
"step": 927
},
{
"epoch": 0.2682856316854582,
"grad_norm": 1.1886579501994778,
"learning_rate": 1.6826989030106902e-05,
"loss": 1.6133,
"step": 928
},
{
"epoch": 0.268574732581671,
"grad_norm": 1.1934353770717314,
"learning_rate": 1.6820281831638757e-05,
"loss": 1.559,
"step": 929
},
{
"epoch": 0.2688638334778838,
"grad_norm": 1.1614491161854386,
"learning_rate": 1.6813568891549584e-05,
"loss": 1.5769,
"step": 930
},
{
"epoch": 0.26915293437409654,
"grad_norm": 1.1397915171658068,
"learning_rate": 1.680685021549063e-05,
"loss": 1.5537,
"step": 931
},
{
"epoch": 0.26944203527030935,
"grad_norm": 1.149992178338205,
"learning_rate": 1.6800125809117984e-05,
"loss": 1.6231,
"step": 932
},
{
"epoch": 0.2697311361665221,
"grad_norm": 1.1401360403507486,
"learning_rate": 1.6793395678092555e-05,
"loss": 1.6182,
"step": 933
},
{
"epoch": 0.27002023706273487,
"grad_norm": 1.1209347016808902,
"learning_rate": 1.6786659828080078e-05,
"loss": 1.6145,
"step": 934
},
{
"epoch": 0.2703093379589477,
"grad_norm": 1.159654576616978,
"learning_rate": 1.6779918264751082e-05,
"loss": 1.707,
"step": 935
},
{
"epoch": 0.27059843885516044,
"grad_norm": 1.1184081607191674,
"learning_rate": 1.6773170993780926e-05,
"loss": 1.6816,
"step": 936
},
{
"epoch": 0.27088753975137325,
"grad_norm": 1.0985004602757438,
"learning_rate": 1.676641802084977e-05,
"loss": 1.6155,
"step": 937
},
{
"epoch": 0.271176640647586,
"grad_norm": 1.1086581131090065,
"learning_rate": 1.6759659351642563e-05,
"loss": 1.5973,
"step": 938
},
{
"epoch": 0.27146574154379877,
"grad_norm": 1.1262795371219783,
"learning_rate": 1.6752894991849066e-05,
"loss": 1.6819,
"step": 939
},
{
"epoch": 0.2717548424400116,
"grad_norm": 1.1042181463660192,
"learning_rate": 1.674612494716382e-05,
"loss": 1.6242,
"step": 940
},
{
"epoch": 0.27204394333622434,
"grad_norm": 1.1225263911796381,
"learning_rate": 1.6739349223286153e-05,
"loss": 1.688,
"step": 941
},
{
"epoch": 0.2723330442324371,
"grad_norm": 1.0507156428201587,
"learning_rate": 1.673256782592018e-05,
"loss": 1.6293,
"step": 942
},
{
"epoch": 0.2726221451286499,
"grad_norm": 1.1687937216638025,
"learning_rate": 1.6725780760774775e-05,
"loss": 1.5753,
"step": 943
},
{
"epoch": 0.27291124602486266,
"grad_norm": 1.0675977484748183,
"learning_rate": 1.671898803356361e-05,
"loss": 1.5865,
"step": 944
},
{
"epoch": 0.2732003469210755,
"grad_norm": 1.1223391674880248,
"learning_rate": 1.67121896500051e-05,
"loss": 1.6529,
"step": 945
},
{
"epoch": 0.27348944781728823,
"grad_norm": 1.1326572315570196,
"learning_rate": 1.6705385615822436e-05,
"loss": 1.6463,
"step": 946
},
{
"epoch": 0.273778548713501,
"grad_norm": 1.2086326669717427,
"learning_rate": 1.669857593674356e-05,
"loss": 1.6377,
"step": 947
},
{
"epoch": 0.2740676496097138,
"grad_norm": 1.193825641421145,
"learning_rate": 1.6691760618501163e-05,
"loss": 1.5547,
"step": 948
},
{
"epoch": 0.27435675050592656,
"grad_norm": 1.2866576631032545,
"learning_rate": 1.6684939666832695e-05,
"loss": 1.6659,
"step": 949
},
{
"epoch": 0.2746458514021394,
"grad_norm": 1.2166777753299653,
"learning_rate": 1.6678113087480334e-05,
"loss": 1.5869,
"step": 950
},
{
"epoch": 0.27493495229835213,
"grad_norm": 1.192007342337803,
"learning_rate": 1.6671280886191005e-05,
"loss": 1.6139,
"step": 951
},
{
"epoch": 0.2752240531945649,
"grad_norm": 1.1110607953589973,
"learning_rate": 1.6664443068716365e-05,
"loss": 1.5512,
"step": 952
},
{
"epoch": 0.2755131540907777,
"grad_norm": 1.1519768318705965,
"learning_rate": 1.6657599640812793e-05,
"loss": 1.5454,
"step": 953
},
{
"epoch": 0.27580225498699046,
"grad_norm": 1.1479317865206893,
"learning_rate": 1.6650750608241403e-05,
"loss": 1.6038,
"step": 954
},
{
"epoch": 0.2760913558832032,
"grad_norm": 1.0950679969123849,
"learning_rate": 1.664389597676801e-05,
"loss": 1.5197,
"step": 955
},
{
"epoch": 0.27638045677941603,
"grad_norm": 1.1421306294859326,
"learning_rate": 1.6637035752163166e-05,
"loss": 1.7071,
"step": 956
},
{
"epoch": 0.2766695576756288,
"grad_norm": 1.0604403244112353,
"learning_rate": 1.66301699402021e-05,
"loss": 1.6066,
"step": 957
},
{
"epoch": 0.2769586585718416,
"grad_norm": 1.1297323126616883,
"learning_rate": 1.6623298546664773e-05,
"loss": 1.7718,
"step": 958
},
{
"epoch": 0.27724775946805436,
"grad_norm": 1.0910744186175767,
"learning_rate": 1.6616421577335834e-05,
"loss": 1.6962,
"step": 959
},
{
"epoch": 0.2775368603642671,
"grad_norm": 1.12747611212375,
"learning_rate": 1.6609539038004624e-05,
"loss": 1.7292,
"step": 960
},
{
"epoch": 0.2778259612604799,
"grad_norm": 1.093202597412715,
"learning_rate": 1.6602650934465178e-05,
"loss": 1.6183,
"step": 961
},
{
"epoch": 0.2781150621566927,
"grad_norm": 1.072162280018483,
"learning_rate": 1.659575727251621e-05,
"loss": 1.5427,
"step": 962
},
{
"epoch": 0.27840416305290544,
"grad_norm": 1.1781301893929088,
"learning_rate": 1.658885805796111e-05,
"loss": 1.6114,
"step": 963
},
{
"epoch": 0.27869326394911825,
"grad_norm": 1.153766159493101,
"learning_rate": 1.6581953296607963e-05,
"loss": 1.6674,
"step": 964
},
{
"epoch": 0.278982364845331,
"grad_norm": 1.1586767056723473,
"learning_rate": 1.657504299426949e-05,
"loss": 1.5837,
"step": 965
},
{
"epoch": 0.2792714657415438,
"grad_norm": 1.0755813818211648,
"learning_rate": 1.656812715676311e-05,
"loss": 1.6153,
"step": 966
},
{
"epoch": 0.2795605666377566,
"grad_norm": 1.2638124252742915,
"learning_rate": 1.6561205789910885e-05,
"loss": 1.5646,
"step": 967
},
{
"epoch": 0.27984966753396934,
"grad_norm": 1.0904862968899787,
"learning_rate": 1.655427889953953e-05,
"loss": 1.6286,
"step": 968
},
{
"epoch": 0.28013876843018215,
"grad_norm": 1.1573153681833863,
"learning_rate": 1.6547346491480414e-05,
"loss": 1.6379,
"step": 969
},
{
"epoch": 0.2804278693263949,
"grad_norm": 1.1816273912861235,
"learning_rate": 1.654040857156955e-05,
"loss": 1.5763,
"step": 970
},
{
"epoch": 0.28071697022260766,
"grad_norm": 1.1643158633524044,
"learning_rate": 1.6533465145647598e-05,
"loss": 1.6681,
"step": 971
},
{
"epoch": 0.2810060711188205,
"grad_norm": 1.156818680500087,
"learning_rate": 1.6526516219559843e-05,
"loss": 1.5255,
"step": 972
},
{
"epoch": 0.28129517201503323,
"grad_norm": 1.1426079298705811,
"learning_rate": 1.6519561799156202e-05,
"loss": 1.5607,
"step": 973
},
{
"epoch": 0.28158427291124605,
"grad_norm": 1.0995905315062742,
"learning_rate": 1.6512601890291226e-05,
"loss": 1.5456,
"step": 974
},
{
"epoch": 0.2818733738074588,
"grad_norm": 1.1899775942969868,
"learning_rate": 1.6505636498824074e-05,
"loss": 1.6684,
"step": 975
},
{
"epoch": 0.28216247470367156,
"grad_norm": 1.155528745251187,
"learning_rate": 1.6498665630618532e-05,
"loss": 1.5868,
"step": 976
},
{
"epoch": 0.2824515755998844,
"grad_norm": 1.1411174440976906,
"learning_rate": 1.649168929154299e-05,
"loss": 1.4712,
"step": 977
},
{
"epoch": 0.28274067649609713,
"grad_norm": 1.0860228511069425,
"learning_rate": 1.6484707487470447e-05,
"loss": 1.662,
"step": 978
},
{
"epoch": 0.2830297773923099,
"grad_norm": 1.1466851119233112,
"learning_rate": 1.6477720224278493e-05,
"loss": 1.6142,
"step": 979
},
{
"epoch": 0.2833188782885227,
"grad_norm": 1.3078811221895994,
"learning_rate": 1.6470727507849332e-05,
"loss": 1.6702,
"step": 980
},
{
"epoch": 0.28360797918473546,
"grad_norm": 1.1996795619215384,
"learning_rate": 1.6463729344069736e-05,
"loss": 1.6115,
"step": 981
},
{
"epoch": 0.28389708008094827,
"grad_norm": 1.2042463666525114,
"learning_rate": 1.645672573883109e-05,
"loss": 1.6333,
"step": 982
},
{
"epoch": 0.28418618097716103,
"grad_norm": 1.1063042405661339,
"learning_rate": 1.6449716698029335e-05,
"loss": 1.6195,
"step": 983
},
{
"epoch": 0.2844752818733738,
"grad_norm": 1.1072288580101002,
"learning_rate": 1.6442702227564996e-05,
"loss": 1.6116,
"step": 984
},
{
"epoch": 0.2847643827695866,
"grad_norm": 1.2243676300508977,
"learning_rate": 1.643568233334318e-05,
"loss": 1.5821,
"step": 985
},
{
"epoch": 0.28505348366579936,
"grad_norm": 1.1664544235823486,
"learning_rate": 1.642865702127354e-05,
"loss": 1.671,
"step": 986
},
{
"epoch": 0.28534258456201217,
"grad_norm": 1.1167634055353126,
"learning_rate": 1.6421626297270316e-05,
"loss": 1.6361,
"step": 987
},
{
"epoch": 0.2856316854582249,
"grad_norm": 1.1612972833032917,
"learning_rate": 1.6414590167252273e-05,
"loss": 1.5859,
"step": 988
},
{
"epoch": 0.2859207863544377,
"grad_norm": 1.213672115046863,
"learning_rate": 1.6407548637142746e-05,
"loss": 1.5757,
"step": 989
},
{
"epoch": 0.2862098872506505,
"grad_norm": 1.1300532059127562,
"learning_rate": 1.640050171286962e-05,
"loss": 1.6489,
"step": 990
},
{
"epoch": 0.28649898814686325,
"grad_norm": 1.1475160145568735,
"learning_rate": 1.6393449400365307e-05,
"loss": 1.5792,
"step": 991
},
{
"epoch": 0.286788089043076,
"grad_norm": 1.171976265527746,
"learning_rate": 1.6386391705566768e-05,
"loss": 1.5027,
"step": 992
},
{
"epoch": 0.2870771899392888,
"grad_norm": 1.1698090854087668,
"learning_rate": 1.6379328634415484e-05,
"loss": 1.681,
"step": 993
},
{
"epoch": 0.2873662908355016,
"grad_norm": 1.1317960093108423,
"learning_rate": 1.6372260192857465e-05,
"loss": 1.603,
"step": 994
},
{
"epoch": 0.2876553917317144,
"grad_norm": 1.179188272729989,
"learning_rate": 1.636518638684325e-05,
"loss": 1.6851,
"step": 995
},
{
"epoch": 0.28794449262792715,
"grad_norm": 1.1079735999761406,
"learning_rate": 1.6358107222327884e-05,
"loss": 1.6323,
"step": 996
},
{
"epoch": 0.2882335935241399,
"grad_norm": 1.1148610947340667,
"learning_rate": 1.635102270527093e-05,
"loss": 1.5511,
"step": 997
},
{
"epoch": 0.2885226944203527,
"grad_norm": 1.184333908207028,
"learning_rate": 1.6343932841636455e-05,
"loss": 1.6631,
"step": 998
},
{
"epoch": 0.2888117953165655,
"grad_norm": 1.1154370991272007,
"learning_rate": 1.6336837637393025e-05,
"loss": 1.6557,
"step": 999
},
{
"epoch": 0.28910089621277824,
"grad_norm": 1.1055845000132498,
"learning_rate": 1.63297370985137e-05,
"loss": 1.623,
"step": 1000
},
{
"epoch": 0.28938999710899105,
"grad_norm": 1.1478774668713727,
"learning_rate": 1.6322631230976035e-05,
"loss": 1.6303,
"step": 1001
},
{
"epoch": 0.2896790980052038,
"grad_norm": 1.1260832092736042,
"learning_rate": 1.6315520040762073e-05,
"loss": 1.6627,
"step": 1002
},
{
"epoch": 0.2899681989014166,
"grad_norm": 1.1328217900445974,
"learning_rate": 1.6308403533858334e-05,
"loss": 1.6185,
"step": 1003
},
{
"epoch": 0.2902572997976294,
"grad_norm": 1.1217019928925396,
"learning_rate": 1.6301281716255814e-05,
"loss": 1.5887,
"step": 1004
},
{
"epoch": 0.29054640069384213,
"grad_norm": 1.0662817504674387,
"learning_rate": 1.629415459394998e-05,
"loss": 1.6385,
"step": 1005
},
{
"epoch": 0.29083550159005495,
"grad_norm": 1.0503730461501608,
"learning_rate": 1.6287022172940762e-05,
"loss": 1.6164,
"step": 1006
},
{
"epoch": 0.2911246024862677,
"grad_norm": 1.0733421544977206,
"learning_rate": 1.627988445923256e-05,
"loss": 1.6689,
"step": 1007
},
{
"epoch": 0.29141370338248046,
"grad_norm": 1.0667066239811298,
"learning_rate": 1.6272741458834224e-05,
"loss": 1.6697,
"step": 1008
},
{
"epoch": 0.2917028042786933,
"grad_norm": 1.087739247251025,
"learning_rate": 1.6265593177759046e-05,
"loss": 1.6863,
"step": 1009
},
{
"epoch": 0.29199190517490603,
"grad_norm": 1.0713166761734145,
"learning_rate": 1.625843962202478e-05,
"loss": 1.5898,
"step": 1010
},
{
"epoch": 0.29228100607111884,
"grad_norm": 1.1902495194222464,
"learning_rate": 1.6251280797653606e-05,
"loss": 1.6618,
"step": 1011
},
{
"epoch": 0.2925701069673316,
"grad_norm": 1.125478902548264,
"learning_rate": 1.624411671067215e-05,
"loss": 1.5907,
"step": 1012
},
{
"epoch": 0.29285920786354436,
"grad_norm": 1.1078237145953016,
"learning_rate": 1.623694736711146e-05,
"loss": 1.7451,
"step": 1013
},
{
"epoch": 0.29314830875975717,
"grad_norm": 1.1051877093548326,
"learning_rate": 1.6229772773007016e-05,
"loss": 1.6274,
"step": 1014
},
{
"epoch": 0.2934374096559699,
"grad_norm": 1.121714398039267,
"learning_rate": 1.6222592934398715e-05,
"loss": 1.4989,
"step": 1015
},
{
"epoch": 0.2937265105521827,
"grad_norm": 1.0925330870668526,
"learning_rate": 1.6215407857330862e-05,
"loss": 1.5753,
"step": 1016
},
{
"epoch": 0.2940156114483955,
"grad_norm": 1.086137363905831,
"learning_rate": 1.620821754785219e-05,
"loss": 1.5898,
"step": 1017
},
{
"epoch": 0.29430471234460825,
"grad_norm": 1.1237702975359138,
"learning_rate": 1.6201022012015823e-05,
"loss": 1.7232,
"step": 1018
},
{
"epoch": 0.29459381324082107,
"grad_norm": 1.1254688780372208,
"learning_rate": 1.619382125587928e-05,
"loss": 1.6816,
"step": 1019
},
{
"epoch": 0.2948829141370338,
"grad_norm": 1.123440253692758,
"learning_rate": 1.6186615285504496e-05,
"loss": 1.6185,
"step": 1020
},
{
"epoch": 0.2951720150332466,
"grad_norm": 1.0947286679961508,
"learning_rate": 1.617940410695777e-05,
"loss": 1.6673,
"step": 1021
},
{
"epoch": 0.2954611159294594,
"grad_norm": 1.1147257497620007,
"learning_rate": 1.6172187726309802e-05,
"loss": 1.5691,
"step": 1022
},
{
"epoch": 0.29575021682567215,
"grad_norm": 1.2128326700223733,
"learning_rate": 1.6164966149635664e-05,
"loss": 1.5773,
"step": 1023
},
{
"epoch": 0.29603931772188496,
"grad_norm": 1.1251444723700124,
"learning_rate": 1.615773938301481e-05,
"loss": 1.6696,
"step": 1024
},
{
"epoch": 0.2963284186180977,
"grad_norm": 1.1144709330002471,
"learning_rate": 1.615050743253106e-05,
"loss": 1.5903,
"step": 1025
},
{
"epoch": 0.2966175195143105,
"grad_norm": 1.114572930209085,
"learning_rate": 1.6143270304272588e-05,
"loss": 1.5259,
"step": 1026
},
{
"epoch": 0.2969066204105233,
"grad_norm": 1.1041968816612577,
"learning_rate": 1.613602800433194e-05,
"loss": 1.594,
"step": 1027
},
{
"epoch": 0.29719572130673605,
"grad_norm": 1.1309087615970796,
"learning_rate": 1.6128780538806012e-05,
"loss": 1.6098,
"step": 1028
},
{
"epoch": 0.2974848222029488,
"grad_norm": 1.0655925882162895,
"learning_rate": 1.6121527913796045e-05,
"loss": 1.5776,
"step": 1029
},
{
"epoch": 0.2977739230991616,
"grad_norm": 1.153738047328653,
"learning_rate": 1.611427013540763e-05,
"loss": 1.6305,
"step": 1030
},
{
"epoch": 0.2980630239953744,
"grad_norm": 1.1277739002652585,
"learning_rate": 1.6107007209750687e-05,
"loss": 1.5917,
"step": 1031
},
{
"epoch": 0.2983521248915872,
"grad_norm": 1.1292106867822904,
"learning_rate": 1.6099739142939477e-05,
"loss": 1.644,
"step": 1032
},
{
"epoch": 0.29864122578779995,
"grad_norm": 1.1236415518463128,
"learning_rate": 1.6092465941092585e-05,
"loss": 1.6763,
"step": 1033
},
{
"epoch": 0.2989303266840127,
"grad_norm": 1.119641797142331,
"learning_rate": 1.6085187610332925e-05,
"loss": 1.6655,
"step": 1034
},
{
"epoch": 0.2992194275802255,
"grad_norm": 1.1300850404882916,
"learning_rate": 1.607790415678772e-05,
"loss": 1.6187,
"step": 1035
},
{
"epoch": 0.2995085284764383,
"grad_norm": 1.1047280483132156,
"learning_rate": 1.6070615586588516e-05,
"loss": 1.6469,
"step": 1036
},
{
"epoch": 0.29979762937265103,
"grad_norm": 1.152691255287852,
"learning_rate": 1.6063321905871152e-05,
"loss": 1.665,
"step": 1037
},
{
"epoch": 0.30008673026886384,
"grad_norm": 1.169423494531767,
"learning_rate": 1.605602312077578e-05,
"loss": 1.6282,
"step": 1038
},
{
"epoch": 0.3003758311650766,
"grad_norm": 1.2677425058482785,
"learning_rate": 1.6048719237446852e-05,
"loss": 1.6314,
"step": 1039
},
{
"epoch": 0.3006649320612894,
"grad_norm": 1.1668503548868798,
"learning_rate": 1.60414102620331e-05,
"loss": 1.6291,
"step": 1040
},
{
"epoch": 0.30095403295750217,
"grad_norm": 1.1367727503019929,
"learning_rate": 1.6034096200687552e-05,
"loss": 1.4852,
"step": 1041
},
{
"epoch": 0.30124313385371493,
"grad_norm": 1.1323984808855299,
"learning_rate": 1.6026777059567515e-05,
"loss": 1.5893,
"step": 1042
},
{
"epoch": 0.30153223474992774,
"grad_norm": 1.1609097650981914,
"learning_rate": 1.601945284483457e-05,
"loss": 1.5886,
"step": 1043
},
{
"epoch": 0.3018213356461405,
"grad_norm": 1.1415713259757136,
"learning_rate": 1.601212356265458e-05,
"loss": 1.7002,
"step": 1044
},
{
"epoch": 0.30211043654235326,
"grad_norm": 1.227231895248573,
"learning_rate": 1.6004789219197656e-05,
"loss": 1.5416,
"step": 1045
},
{
"epoch": 0.30239953743856607,
"grad_norm": 1.163043113422494,
"learning_rate": 1.599744982063818e-05,
"loss": 1.6469,
"step": 1046
},
{
"epoch": 0.3026886383347788,
"grad_norm": 1.1126723523837783,
"learning_rate": 1.5990105373154793e-05,
"loss": 1.6411,
"step": 1047
},
{
"epoch": 0.30297773923099164,
"grad_norm": 1.122459840411915,
"learning_rate": 1.5982755882930382e-05,
"loss": 1.6519,
"step": 1048
},
{
"epoch": 0.3032668401272044,
"grad_norm": 1.5840082766344463,
"learning_rate": 1.5975401356152074e-05,
"loss": 1.6135,
"step": 1049
},
{
"epoch": 0.30355594102341715,
"grad_norm": 1.1225058692910097,
"learning_rate": 1.596804179901125e-05,
"loss": 1.5674,
"step": 1050
},
{
"epoch": 0.30384504191962997,
"grad_norm": 1.1620815072786224,
"learning_rate": 1.5960677217703512e-05,
"loss": 1.5796,
"step": 1051
},
{
"epoch": 0.3041341428158427,
"grad_norm": 1.1236988795621041,
"learning_rate": 1.59533076184287e-05,
"loss": 1.6219,
"step": 1052
},
{
"epoch": 0.3044232437120555,
"grad_norm": 1.1118641361809816,
"learning_rate": 1.594593300739087e-05,
"loss": 1.6241,
"step": 1053
},
{
"epoch": 0.3047123446082683,
"grad_norm": 1.1947464614628307,
"learning_rate": 1.5938553390798306e-05,
"loss": 1.5793,
"step": 1054
},
{
"epoch": 0.30500144550448105,
"grad_norm": 1.0683464597665702,
"learning_rate": 1.59311687748635e-05,
"loss": 1.59,
"step": 1055
},
{
"epoch": 0.30529054640069386,
"grad_norm": 1.148680753466922,
"learning_rate": 1.5923779165803163e-05,
"loss": 1.5368,
"step": 1056
},
{
"epoch": 0.3055796472969066,
"grad_norm": 1.0975947872426266,
"learning_rate": 1.591638456983819e-05,
"loss": 1.5634,
"step": 1057
},
{
"epoch": 0.3058687481931194,
"grad_norm": 1.0758466082542466,
"learning_rate": 1.5908984993193696e-05,
"loss": 1.5855,
"step": 1058
},
{
"epoch": 0.3061578490893322,
"grad_norm": 1.140426199545744,
"learning_rate": 1.590158044209897e-05,
"loss": 1.6559,
"step": 1059
},
{
"epoch": 0.30644694998554495,
"grad_norm": 1.1060228312495028,
"learning_rate": 1.58941709227875e-05,
"loss": 1.6378,
"step": 1060
},
{
"epoch": 0.30673605088175776,
"grad_norm": 1.113526257443253,
"learning_rate": 1.588675644149695e-05,
"loss": 1.6179,
"step": 1061
},
{
"epoch": 0.3070251517779705,
"grad_norm": 1.2173138936643453,
"learning_rate": 1.587933700446918e-05,
"loss": 1.6806,
"step": 1062
},
{
"epoch": 0.3073142526741833,
"grad_norm": 1.1400857891987628,
"learning_rate": 1.587191261795019e-05,
"loss": 1.6742,
"step": 1063
},
{
"epoch": 0.3076033535703961,
"grad_norm": 1.1305997594773096,
"learning_rate": 1.5864483288190174e-05,
"loss": 1.5272,
"step": 1064
},
{
"epoch": 0.30789245446660884,
"grad_norm": 1.228007225532647,
"learning_rate": 1.5857049021443467e-05,
"loss": 1.7073,
"step": 1065
},
{
"epoch": 0.3081815553628216,
"grad_norm": 1.162916715072549,
"learning_rate": 1.584960982396858e-05,
"loss": 1.633,
"step": 1066
},
{
"epoch": 0.3084706562590344,
"grad_norm": 1.1597744213298864,
"learning_rate": 1.5842165702028157e-05,
"loss": 1.4521,
"step": 1067
},
{
"epoch": 0.30875975715524717,
"grad_norm": 1.091472479025558,
"learning_rate": 1.5834716661889004e-05,
"loss": 1.5902,
"step": 1068
},
{
"epoch": 0.30904885805146,
"grad_norm": 1.1086742754929058,
"learning_rate": 1.5827262709822056e-05,
"loss": 1.5939,
"step": 1069
},
{
"epoch": 0.30933795894767274,
"grad_norm": 1.1593052089546423,
"learning_rate": 1.581980385210238e-05,
"loss": 1.5925,
"step": 1070
},
{
"epoch": 0.3096270598438855,
"grad_norm": 1.1650634297424989,
"learning_rate": 1.5812340095009192e-05,
"loss": 1.6855,
"step": 1071
},
{
"epoch": 0.3099161607400983,
"grad_norm": 1.099899286022252,
"learning_rate": 1.580487144482581e-05,
"loss": 1.5522,
"step": 1072
},
{
"epoch": 0.31020526163631107,
"grad_norm": 1.1052999796757328,
"learning_rate": 1.5797397907839678e-05,
"loss": 1.5938,
"step": 1073
},
{
"epoch": 0.3104943625325238,
"grad_norm": 1.1060803844176064,
"learning_rate": 1.5789919490342367e-05,
"loss": 1.5835,
"step": 1074
},
{
"epoch": 0.31078346342873664,
"grad_norm": 1.1187707670977645,
"learning_rate": 1.578243619862954e-05,
"loss": 1.5327,
"step": 1075
},
{
"epoch": 0.3110725643249494,
"grad_norm": 1.1521574761443172,
"learning_rate": 1.5774948039000966e-05,
"loss": 1.5998,
"step": 1076
},
{
"epoch": 0.3113616652211622,
"grad_norm": 1.109080218315599,
"learning_rate": 1.5767455017760524e-05,
"loss": 1.6479,
"step": 1077
},
{
"epoch": 0.31165076611737497,
"grad_norm": 1.1370919220061753,
"learning_rate": 1.5759957141216175e-05,
"loss": 1.5246,
"step": 1078
},
{
"epoch": 0.3119398670135877,
"grad_norm": 1.113530924054974,
"learning_rate": 1.5752454415679968e-05,
"loss": 1.6073,
"step": 1079
},
{
"epoch": 0.31222896790980054,
"grad_norm": 1.0973243348785886,
"learning_rate": 1.5744946847468036e-05,
"loss": 1.694,
"step": 1080
},
{
"epoch": 0.3125180688060133,
"grad_norm": 1.1647355097669179,
"learning_rate": 1.5737434442900585e-05,
"loss": 1.6135,
"step": 1081
},
{
"epoch": 0.31280716970222605,
"grad_norm": 1.1314628251550698,
"learning_rate": 1.5729917208301905e-05,
"loss": 1.7268,
"step": 1082
},
{
"epoch": 0.31309627059843886,
"grad_norm": 1.1557900599799165,
"learning_rate": 1.5722395150000336e-05,
"loss": 1.6416,
"step": 1083
},
{
"epoch": 0.3133853714946516,
"grad_norm": 1.0866193980050511,
"learning_rate": 1.5714868274328293e-05,
"loss": 1.5172,
"step": 1084
},
{
"epoch": 0.31367447239086443,
"grad_norm": 1.158830944861789,
"learning_rate": 1.5707336587622232e-05,
"loss": 1.6342,
"step": 1085
},
{
"epoch": 0.3139635732870772,
"grad_norm": 1.1695982419960425,
"learning_rate": 1.569980009622267e-05,
"loss": 1.6118,
"step": 1086
},
{
"epoch": 0.31425267418328995,
"grad_norm": 1.095428236784029,
"learning_rate": 1.5692258806474168e-05,
"loss": 1.5944,
"step": 1087
},
{
"epoch": 0.31454177507950276,
"grad_norm": 1.1019964357061058,
"learning_rate": 1.568471272472532e-05,
"loss": 1.573,
"step": 1088
},
{
"epoch": 0.3148308759757155,
"grad_norm": 1.1256111784675789,
"learning_rate": 1.567716185732876e-05,
"loss": 1.604,
"step": 1089
},
{
"epoch": 0.3151199768719283,
"grad_norm": 1.1148637100070335,
"learning_rate": 1.566960621064115e-05,
"loss": 1.617,
"step": 1090
},
{
"epoch": 0.3154090777681411,
"grad_norm": 1.0804672597638105,
"learning_rate": 1.566204579102317e-05,
"loss": 1.5047,
"step": 1091
},
{
"epoch": 0.31569817866435385,
"grad_norm": 1.1419136802627774,
"learning_rate": 1.5654480604839532e-05,
"loss": 1.5378,
"step": 1092
},
{
"epoch": 0.31598727956056666,
"grad_norm": 1.1498293898506018,
"learning_rate": 1.564691065845894e-05,
"loss": 1.5626,
"step": 1093
},
{
"epoch": 0.3162763804567794,
"grad_norm": 1.1143430311083644,
"learning_rate": 1.5639335958254123e-05,
"loss": 1.595,
"step": 1094
},
{
"epoch": 0.3165654813529922,
"grad_norm": 1.0931970046054698,
"learning_rate": 1.5631756510601802e-05,
"loss": 1.4775,
"step": 1095
},
{
"epoch": 0.316854582249205,
"grad_norm": 1.105514673067743,
"learning_rate": 1.5624172321882703e-05,
"loss": 1.6032,
"step": 1096
},
{
"epoch": 0.31714368314541774,
"grad_norm": 1.0931739627772648,
"learning_rate": 1.5616583398481533e-05,
"loss": 1.6174,
"step": 1097
},
{
"epoch": 0.31743278404163056,
"grad_norm": 1.1211144837250133,
"learning_rate": 1.560898974678699e-05,
"loss": 1.7499,
"step": 1098
},
{
"epoch": 0.3177218849378433,
"grad_norm": 1.1209699013931562,
"learning_rate": 1.5601391373191757e-05,
"loss": 1.4995,
"step": 1099
},
{
"epoch": 0.31801098583405607,
"grad_norm": 1.0922080473768103,
"learning_rate": 1.559378828409249e-05,
"loss": 1.5329,
"step": 1100
},
{
"epoch": 0.3183000867302689,
"grad_norm": 1.1365989497096085,
"learning_rate": 1.5586180485889803e-05,
"loss": 1.6751,
"step": 1101
},
{
"epoch": 0.31858918762648164,
"grad_norm": 1.1489329053278985,
"learning_rate": 1.5578567984988292e-05,
"loss": 1.6953,
"step": 1102
},
{
"epoch": 0.3188782885226944,
"grad_norm": 1.1566994177367973,
"learning_rate": 1.5570950787796498e-05,
"loss": 1.6297,
"step": 1103
},
{
"epoch": 0.3191673894189072,
"grad_norm": 1.1580906580853507,
"learning_rate": 1.5563328900726925e-05,
"loss": 1.5271,
"step": 1104
},
{
"epoch": 0.31945649031511997,
"grad_norm": 1.0995229482590019,
"learning_rate": 1.5555702330196024e-05,
"loss": 1.6119,
"step": 1105
},
{
"epoch": 0.3197455912113328,
"grad_norm": 1.0799883018230365,
"learning_rate": 1.554807108262418e-05,
"loss": 1.6782,
"step": 1106
},
{
"epoch": 0.32003469210754554,
"grad_norm": 1.1073029882869814,
"learning_rate": 1.5540435164435726e-05,
"loss": 1.5397,
"step": 1107
},
{
"epoch": 0.3203237930037583,
"grad_norm": 1.1329749767157702,
"learning_rate": 1.5532794582058922e-05,
"loss": 1.7132,
"step": 1108
},
{
"epoch": 0.3206128938999711,
"grad_norm": 1.1204291824946706,
"learning_rate": 1.5525149341925953e-05,
"loss": 1.7194,
"step": 1109
},
{
"epoch": 0.32090199479618386,
"grad_norm": 1.1098026934922063,
"learning_rate": 1.5517499450472927e-05,
"loss": 1.5843,
"step": 1110
},
{
"epoch": 0.3211910956923966,
"grad_norm": 1.1014699968821742,
"learning_rate": 1.550984491413987e-05,
"loss": 1.6016,
"step": 1111
},
{
"epoch": 0.32148019658860943,
"grad_norm": 1.130886787299436,
"learning_rate": 1.5502185739370717e-05,
"loss": 1.5793,
"step": 1112
},
{
"epoch": 0.3217692974848222,
"grad_norm": 1.1290955593656395,
"learning_rate": 1.5494521932613307e-05,
"loss": 1.6501,
"step": 1113
},
{
"epoch": 0.322058398381035,
"grad_norm": 1.0883652426866905,
"learning_rate": 1.548685350031937e-05,
"loss": 1.5592,
"step": 1114
},
{
"epoch": 0.32234749927724776,
"grad_norm": 1.1464074008441614,
"learning_rate": 1.547918044894455e-05,
"loss": 1.6295,
"step": 1115
},
{
"epoch": 0.3226366001734605,
"grad_norm": 1.0848408826768916,
"learning_rate": 1.5471502784948364e-05,
"loss": 1.6202,
"step": 1116
},
{
"epoch": 0.32292570106967333,
"grad_norm": 1.150507963992238,
"learning_rate": 1.5463820514794215e-05,
"loss": 1.5854,
"step": 1117
},
{
"epoch": 0.3232148019658861,
"grad_norm": 1.0976974430882895,
"learning_rate": 1.545613364494938e-05,
"loss": 1.5758,
"step": 1118
},
{
"epoch": 0.32350390286209885,
"grad_norm": 1.1069482166625944,
"learning_rate": 1.5448442181885026e-05,
"loss": 1.6208,
"step": 1119
},
{
"epoch": 0.32379300375831166,
"grad_norm": 1.2077191144956074,
"learning_rate": 1.5440746132076155e-05,
"loss": 1.5925,
"step": 1120
},
{
"epoch": 0.3240821046545244,
"grad_norm": 1.1541833705064752,
"learning_rate": 1.5433045502001668e-05,
"loss": 1.6509,
"step": 1121
},
{
"epoch": 0.32437120555073723,
"grad_norm": 1.1100819739423928,
"learning_rate": 1.542534029814429e-05,
"loss": 1.5918,
"step": 1122
},
{
"epoch": 0.32466030644695,
"grad_norm": 1.1765209537252967,
"learning_rate": 1.5417630526990613e-05,
"loss": 1.6111,
"step": 1123
},
{
"epoch": 0.32494940734316274,
"grad_norm": 1.092648262023873,
"learning_rate": 1.540991619503107e-05,
"loss": 1.5288,
"step": 1124
},
{
"epoch": 0.32523850823937556,
"grad_norm": 1.1276180188457035,
"learning_rate": 1.540219730875994e-05,
"loss": 1.5382,
"step": 1125
},
{
"epoch": 0.3255276091355883,
"grad_norm": 1.571590596776389,
"learning_rate": 1.5394473874675325e-05,
"loss": 1.6576,
"step": 1126
},
{
"epoch": 0.32581671003180107,
"grad_norm": 1.118162272885637,
"learning_rate": 1.5386745899279155e-05,
"loss": 1.5767,
"step": 1127
},
{
"epoch": 0.3261058109280139,
"grad_norm": 1.0863276376658753,
"learning_rate": 1.5379013389077193e-05,
"loss": 1.5751,
"step": 1128
},
{
"epoch": 0.32639491182422664,
"grad_norm": 1.1100894550632194,
"learning_rate": 1.537127635057902e-05,
"loss": 1.4374,
"step": 1129
},
{
"epoch": 0.32668401272043945,
"grad_norm": 1.086813502970389,
"learning_rate": 1.5363534790298012e-05,
"loss": 1.6202,
"step": 1130
},
{
"epoch": 0.3269731136166522,
"grad_norm": 1.1464824890024201,
"learning_rate": 1.5355788714751378e-05,
"loss": 1.5852,
"step": 1131
},
{
"epoch": 0.32726221451286497,
"grad_norm": 1.1665253096605663,
"learning_rate": 1.5348038130460106e-05,
"loss": 1.6235,
"step": 1132
},
{
"epoch": 0.3275513154090778,
"grad_norm": 1.137364189884065,
"learning_rate": 1.534028304394899e-05,
"loss": 1.5424,
"step": 1133
},
{
"epoch": 0.32784041630529054,
"grad_norm": 1.1333000047594932,
"learning_rate": 1.5332523461746607e-05,
"loss": 1.5362,
"step": 1134
},
{
"epoch": 0.32812951720150335,
"grad_norm": 1.189175584600463,
"learning_rate": 1.5324759390385326e-05,
"loss": 1.6452,
"step": 1135
},
{
"epoch": 0.3284186180977161,
"grad_norm": 1.8321030345914788,
"learning_rate": 1.53169908364013e-05,
"loss": 1.7045,
"step": 1136
},
{
"epoch": 0.32870771899392887,
"grad_norm": 1.1179090119797042,
"learning_rate": 1.530921780633444e-05,
"loss": 1.5984,
"step": 1137
},
{
"epoch": 0.3289968198901417,
"grad_norm": 1.1414953551831817,
"learning_rate": 1.5301440306728433e-05,
"loss": 1.6185,
"step": 1138
},
{
"epoch": 0.32928592078635444,
"grad_norm": 1.036221101321721,
"learning_rate": 1.5293658344130734e-05,
"loss": 1.5744,
"step": 1139
},
{
"epoch": 0.3295750216825672,
"grad_norm": 1.5416356009448273,
"learning_rate": 1.5285871925092545e-05,
"loss": 1.6635,
"step": 1140
},
{
"epoch": 0.32986412257878,
"grad_norm": 1.2162733684436655,
"learning_rate": 1.527808105616883e-05,
"loss": 1.6407,
"step": 1141
},
{
"epoch": 0.33015322347499276,
"grad_norm": 1.1033030184596553,
"learning_rate": 1.5270285743918294e-05,
"loss": 1.6466,
"step": 1142
},
{
"epoch": 0.3304423243712056,
"grad_norm": 1.109124489223676,
"learning_rate": 1.526248599490338e-05,
"loss": 1.7746,
"step": 1143
},
{
"epoch": 0.33073142526741833,
"grad_norm": 1.092288021297295,
"learning_rate": 1.5254681815690271e-05,
"loss": 1.526,
"step": 1144
},
{
"epoch": 0.3310205261636311,
"grad_norm": 1.1575352020094662,
"learning_rate": 1.5246873212848874e-05,
"loss": 1.5878,
"step": 1145
},
{
"epoch": 0.3313096270598439,
"grad_norm": 1.1405141791110793,
"learning_rate": 1.5239060192952821e-05,
"loss": 1.5774,
"step": 1146
},
{
"epoch": 0.33159872795605666,
"grad_norm": 1.122962325771197,
"learning_rate": 1.5231242762579474e-05,
"loss": 1.5913,
"step": 1147
},
{
"epoch": 0.3318878288522694,
"grad_norm": 1.2373901697470753,
"learning_rate": 1.5223420928309889e-05,
"loss": 1.6305,
"step": 1148
},
{
"epoch": 0.33217692974848223,
"grad_norm": 1.091007710090704,
"learning_rate": 1.5215594696728849e-05,
"loss": 1.6023,
"step": 1149
},
{
"epoch": 0.332466030644695,
"grad_norm": 1.1020358370583065,
"learning_rate": 1.5207764074424818e-05,
"loss": 1.5833,
"step": 1150
},
{
"epoch": 0.3327551315409078,
"grad_norm": 1.1069948831247647,
"learning_rate": 1.5199929067989975e-05,
"loss": 1.6207,
"step": 1151
},
{
"epoch": 0.33304423243712056,
"grad_norm": 1.15867755196439,
"learning_rate": 1.5192089684020179e-05,
"loss": 1.6841,
"step": 1152
},
{
"epoch": 0.3333333333333333,
"grad_norm": 1.06051478046549,
"learning_rate": 1.518424592911498e-05,
"loss": 1.6747,
"step": 1153
},
{
"epoch": 0.3336224342295461,
"grad_norm": 1.0941722623539405,
"learning_rate": 1.5176397809877599e-05,
"loss": 1.5649,
"step": 1154
},
{
"epoch": 0.3339115351257589,
"grad_norm": 1.065308694755837,
"learning_rate": 1.5168545332914942e-05,
"loss": 1.5981,
"step": 1155
},
{
"epoch": 0.33420063602197164,
"grad_norm": 1.103949052624009,
"learning_rate": 1.5160688504837577e-05,
"loss": 1.6367,
"step": 1156
},
{
"epoch": 0.33448973691818445,
"grad_norm": 1.108624129747703,
"learning_rate": 1.5152827332259738e-05,
"loss": 1.5322,
"step": 1157
},
{
"epoch": 0.3347788378143972,
"grad_norm": 1.171699585963367,
"learning_rate": 1.5144961821799314e-05,
"loss": 1.6257,
"step": 1158
},
{
"epoch": 0.33506793871061,
"grad_norm": 1.1189026609089276,
"learning_rate": 1.5137091980077842e-05,
"loss": 1.6154,
"step": 1159
},
{
"epoch": 0.3353570396068228,
"grad_norm": 1.0695939123636227,
"learning_rate": 1.5129217813720516e-05,
"loss": 1.6156,
"step": 1160
},
{
"epoch": 0.33564614050303554,
"grad_norm": 1.1139701552322105,
"learning_rate": 1.5121339329356163e-05,
"loss": 1.6639,
"step": 1161
},
{
"epoch": 0.33593524139924835,
"grad_norm": 1.0899522793859586,
"learning_rate": 1.5113456533617243e-05,
"loss": 1.5977,
"step": 1162
},
{
"epoch": 0.3362243422954611,
"grad_norm": 1.1231652645265944,
"learning_rate": 1.5105569433139859e-05,
"loss": 1.5887,
"step": 1163
},
{
"epoch": 0.33651344319167387,
"grad_norm": 1.1311200256557958,
"learning_rate": 1.5097678034563716e-05,
"loss": 1.6756,
"step": 1164
},
{
"epoch": 0.3368025440878867,
"grad_norm": 1.0808514264048,
"learning_rate": 1.5089782344532155e-05,
"loss": 1.649,
"step": 1165
},
{
"epoch": 0.33709164498409944,
"grad_norm": 1.1031304339670052,
"learning_rate": 1.5081882369692129e-05,
"loss": 1.6388,
"step": 1166
},
{
"epoch": 0.33738074588031225,
"grad_norm": 1.0793600842313416,
"learning_rate": 1.507397811669418e-05,
"loss": 1.5254,
"step": 1167
},
{
"epoch": 0.337669846776525,
"grad_norm": 1.2081295628157755,
"learning_rate": 1.5066069592192483e-05,
"loss": 1.6703,
"step": 1168
},
{
"epoch": 0.33795894767273776,
"grad_norm": 1.0883959306735564,
"learning_rate": 1.5058156802844774e-05,
"loss": 1.579,
"step": 1169
},
{
"epoch": 0.3382480485689506,
"grad_norm": 1.1328433977876502,
"learning_rate": 1.5050239755312405e-05,
"loss": 1.5702,
"step": 1170
},
{
"epoch": 0.33853714946516333,
"grad_norm": 1.1355213706257294,
"learning_rate": 1.5042318456260305e-05,
"loss": 1.6607,
"step": 1171
},
{
"epoch": 0.33882625036137615,
"grad_norm": 1.1058142406765221,
"learning_rate": 1.5034392912356973e-05,
"loss": 1.6631,
"step": 1172
},
{
"epoch": 0.3391153512575889,
"grad_norm": 1.063531469536154,
"learning_rate": 1.50264631302745e-05,
"loss": 1.6077,
"step": 1173
},
{
"epoch": 0.33940445215380166,
"grad_norm": 1.127116435978979,
"learning_rate": 1.5018529116688526e-05,
"loss": 1.585,
"step": 1174
},
{
"epoch": 0.3396935530500145,
"grad_norm": 1.0953876410839178,
"learning_rate": 1.5010590878278266e-05,
"loss": 1.5929,
"step": 1175
},
{
"epoch": 0.33998265394622723,
"grad_norm": 1.0550371976819637,
"learning_rate": 1.5002648421726487e-05,
"loss": 1.5637,
"step": 1176
},
{
"epoch": 0.34027175484244,
"grad_norm": 1.0884184120480185,
"learning_rate": 1.4994701753719505e-05,
"loss": 1.5982,
"step": 1177
},
{
"epoch": 0.3405608557386528,
"grad_norm": 1.114897410077523,
"learning_rate": 1.4986750880947184e-05,
"loss": 1.6062,
"step": 1178
},
{
"epoch": 0.34084995663486556,
"grad_norm": 1.066891980414234,
"learning_rate": 1.497879581010293e-05,
"loss": 1.606,
"step": 1179
},
{
"epoch": 0.34113905753107837,
"grad_norm": 1.0880910736641207,
"learning_rate": 1.4970836547883681e-05,
"loss": 1.5708,
"step": 1180
},
{
"epoch": 0.34142815842729113,
"grad_norm": 1.1167707203657147,
"learning_rate": 1.49628731009899e-05,
"loss": 1.5817,
"step": 1181
},
{
"epoch": 0.3417172593235039,
"grad_norm": 1.1339896953914148,
"learning_rate": 1.4954905476125578e-05,
"loss": 1.6754,
"step": 1182
},
{
"epoch": 0.3420063602197167,
"grad_norm": 1.1010220576391134,
"learning_rate": 1.494693367999822e-05,
"loss": 1.5575,
"step": 1183
},
{
"epoch": 0.34229546111592946,
"grad_norm": 1.2739889022885835,
"learning_rate": 1.4938957719318845e-05,
"loss": 1.5497,
"step": 1184
},
{
"epoch": 0.3425845620121422,
"grad_norm": 1.148232650591867,
"learning_rate": 1.4930977600801977e-05,
"loss": 1.5948,
"step": 1185
},
{
"epoch": 0.342873662908355,
"grad_norm": 1.1837885057183477,
"learning_rate": 1.4922993331165639e-05,
"loss": 1.6017,
"step": 1186
},
{
"epoch": 0.3431627638045678,
"grad_norm": 1.1050078395263963,
"learning_rate": 1.4915004917131345e-05,
"loss": 1.6225,
"step": 1187
},
{
"epoch": 0.3434518647007806,
"grad_norm": 1.0565673964453821,
"learning_rate": 1.4907012365424112e-05,
"loss": 1.6723,
"step": 1188
},
{
"epoch": 0.34374096559699335,
"grad_norm": 1.0426217055278246,
"learning_rate": 1.4899015682772422e-05,
"loss": 1.5567,
"step": 1189
},
{
"epoch": 0.3440300664932061,
"grad_norm": 1.0750133699973452,
"learning_rate": 1.489101487590825e-05,
"loss": 1.5984,
"step": 1190
},
{
"epoch": 0.3443191673894189,
"grad_norm": 1.0444023205829724,
"learning_rate": 1.488300995156703e-05,
"loss": 1.524,
"step": 1191
},
{
"epoch": 0.3446082682856317,
"grad_norm": 1.1113297257235197,
"learning_rate": 1.4875000916487674e-05,
"loss": 1.6477,
"step": 1192
},
{
"epoch": 0.34489736918184444,
"grad_norm": 1.0700189539478588,
"learning_rate": 1.4866987777412548e-05,
"loss": 1.6747,
"step": 1193
},
{
"epoch": 0.34518647007805725,
"grad_norm": 1.0791046704423155,
"learning_rate": 1.485897054108747e-05,
"loss": 1.5364,
"step": 1194
},
{
"epoch": 0.34547557097427,
"grad_norm": 1.1152714216930488,
"learning_rate": 1.4850949214261717e-05,
"loss": 1.6436,
"step": 1195
},
{
"epoch": 0.3457646718704828,
"grad_norm": 1.1163524885739,
"learning_rate": 1.4842923803687999e-05,
"loss": 1.6719,
"step": 1196
},
{
"epoch": 0.3460537727666956,
"grad_norm": 1.1470433328057497,
"learning_rate": 1.483489431612247e-05,
"loss": 1.6618,
"step": 1197
},
{
"epoch": 0.34634287366290833,
"grad_norm": 1.1152658216672549,
"learning_rate": 1.482686075832472e-05,
"loss": 1.6082,
"step": 1198
},
{
"epoch": 0.34663197455912115,
"grad_norm": 1.087512082279524,
"learning_rate": 1.4818823137057751e-05,
"loss": 1.6302,
"step": 1199
},
{
"epoch": 0.3469210754553339,
"grad_norm": 1.1269711018498454,
"learning_rate": 1.4810781459088009e-05,
"loss": 1.6265,
"step": 1200
},
{
"epoch": 0.34721017635154666,
"grad_norm": 1.1738148200035237,
"learning_rate": 1.4802735731185327e-05,
"loss": 1.7348,
"step": 1201
},
{
"epoch": 0.3474992772477595,
"grad_norm": 1.0479144362222594,
"learning_rate": 1.4794685960122967e-05,
"loss": 1.5731,
"step": 1202
},
{
"epoch": 0.34778837814397223,
"grad_norm": 1.1183507786337057,
"learning_rate": 1.4786632152677596e-05,
"loss": 1.6627,
"step": 1203
},
{
"epoch": 0.34807747904018504,
"grad_norm": 1.0560834768468748,
"learning_rate": 1.4778574315629262e-05,
"loss": 1.5118,
"step": 1204
},
{
"epoch": 0.3483665799363978,
"grad_norm": 1.1504897519151327,
"learning_rate": 1.4770512455761425e-05,
"loss": 1.6384,
"step": 1205
},
{
"epoch": 0.34865568083261056,
"grad_norm": 1.128971905615821,
"learning_rate": 1.476244657986092e-05,
"loss": 1.6184,
"step": 1206
},
{
"epoch": 0.34894478172882337,
"grad_norm": 1.1323411355011963,
"learning_rate": 1.4754376694717958e-05,
"loss": 1.742,
"step": 1207
},
{
"epoch": 0.34923388262503613,
"grad_norm": 1.0865319922492744,
"learning_rate": 1.4746302807126142e-05,
"loss": 1.5589,
"step": 1208
},
{
"epoch": 0.34952298352124894,
"grad_norm": 1.1013771826316385,
"learning_rate": 1.473822492388243e-05,
"loss": 1.5835,
"step": 1209
},
{
"epoch": 0.3498120844174617,
"grad_norm": 1.133500512314171,
"learning_rate": 1.4730143051787151e-05,
"loss": 1.5905,
"step": 1210
},
{
"epoch": 0.35010118531367446,
"grad_norm": 1.0512972757957857,
"learning_rate": 1.4722057197643986e-05,
"loss": 1.5644,
"step": 1211
},
{
"epoch": 0.35039028620988727,
"grad_norm": 1.0982556984479055,
"learning_rate": 1.4713967368259981e-05,
"loss": 1.617,
"step": 1212
},
{
"epoch": 0.3506793871061,
"grad_norm": 1.1056551102940748,
"learning_rate": 1.4705873570445506e-05,
"loss": 1.586,
"step": 1213
},
{
"epoch": 0.3509684880023128,
"grad_norm": 1.1194856012781043,
"learning_rate": 1.4697775811014294e-05,
"loss": 1.6349,
"step": 1214
},
{
"epoch": 0.3512575888985256,
"grad_norm": 1.1635585093324126,
"learning_rate": 1.4689674096783404e-05,
"loss": 1.7592,
"step": 1215
},
{
"epoch": 0.35154668979473835,
"grad_norm": 1.0761660420536625,
"learning_rate": 1.4681568434573221e-05,
"loss": 1.6587,
"step": 1216
},
{
"epoch": 0.35183579069095117,
"grad_norm": 1.0862955186287286,
"learning_rate": 1.4673458831207463e-05,
"loss": 1.6006,
"step": 1217
},
{
"epoch": 0.3521248915871639,
"grad_norm": 1.0784753202499149,
"learning_rate": 1.4665345293513154e-05,
"loss": 1.5591,
"step": 1218
},
{
"epoch": 0.3524139924833767,
"grad_norm": 1.1153278904671513,
"learning_rate": 1.4657227828320637e-05,
"loss": 1.5676,
"step": 1219
},
{
"epoch": 0.3527030933795895,
"grad_norm": 1.0768215016106648,
"learning_rate": 1.4649106442463565e-05,
"loss": 1.5335,
"step": 1220
},
{
"epoch": 0.35299219427580225,
"grad_norm": 1.1314763789684028,
"learning_rate": 1.4640981142778883e-05,
"loss": 1.6171,
"step": 1221
},
{
"epoch": 0.353281295172015,
"grad_norm": 1.0990493963931165,
"learning_rate": 1.4632851936106838e-05,
"loss": 1.6048,
"step": 1222
},
{
"epoch": 0.3535703960682278,
"grad_norm": 1.085058079934556,
"learning_rate": 1.462471882929096e-05,
"loss": 1.6296,
"step": 1223
},
{
"epoch": 0.3538594969644406,
"grad_norm": 1.132646612791557,
"learning_rate": 1.4616581829178065e-05,
"loss": 1.6962,
"step": 1224
},
{
"epoch": 0.3541485978606534,
"grad_norm": 1.08766264140374,
"learning_rate": 1.4608440942618252e-05,
"loss": 1.5844,
"step": 1225
},
{
"epoch": 0.35443769875686615,
"grad_norm": 1.0655552716344114,
"learning_rate": 1.4600296176464878e-05,
"loss": 1.6268,
"step": 1226
},
{
"epoch": 0.3547267996530789,
"grad_norm": 1.0715746687267362,
"learning_rate": 1.4592147537574585e-05,
"loss": 1.7251,
"step": 1227
},
{
"epoch": 0.3550159005492917,
"grad_norm": 1.153862024978196,
"learning_rate": 1.4583995032807259e-05,
"loss": 1.5546,
"step": 1228
},
{
"epoch": 0.3553050014455045,
"grad_norm": 1.0940461925978813,
"learning_rate": 1.4575838669026045e-05,
"loss": 1.5783,
"step": 1229
},
{
"epoch": 0.35559410234171723,
"grad_norm": 1.088784612051587,
"learning_rate": 1.4567678453097345e-05,
"loss": 1.6016,
"step": 1230
},
{
"epoch": 0.35588320323793005,
"grad_norm": 1.1233954959369794,
"learning_rate": 1.4559514391890788e-05,
"loss": 1.642,
"step": 1231
},
{
"epoch": 0.3561723041341428,
"grad_norm": 1.0795613752451976,
"learning_rate": 1.4551346492279257e-05,
"loss": 1.5359,
"step": 1232
},
{
"epoch": 0.3564614050303556,
"grad_norm": 1.1164982431881545,
"learning_rate": 1.4543174761138852e-05,
"loss": 1.537,
"step": 1233
},
{
"epoch": 0.3567505059265684,
"grad_norm": 1.0439396898471973,
"learning_rate": 1.4534999205348909e-05,
"loss": 1.5957,
"step": 1234
},
{
"epoch": 0.35703960682278113,
"grad_norm": 1.094478459597531,
"learning_rate": 1.4526819831791983e-05,
"loss": 1.5261,
"step": 1235
},
{
"epoch": 0.35732870771899394,
"grad_norm": 1.0523765417826885,
"learning_rate": 1.451863664735383e-05,
"loss": 1.7109,
"step": 1236
},
{
"epoch": 0.3576178086152067,
"grad_norm": 1.0697972681513481,
"learning_rate": 1.4510449658923431e-05,
"loss": 1.5079,
"step": 1237
},
{
"epoch": 0.3579069095114195,
"grad_norm": 1.0776837202479659,
"learning_rate": 1.4502258873392963e-05,
"loss": 1.5373,
"step": 1238
},
{
"epoch": 0.35819601040763227,
"grad_norm": 1.0948923626052913,
"learning_rate": 1.4494064297657795e-05,
"loss": 1.5953,
"step": 1239
},
{
"epoch": 0.358485111303845,
"grad_norm": 1.0851502014434444,
"learning_rate": 1.4485865938616495e-05,
"loss": 1.6227,
"step": 1240
},
{
"epoch": 0.35877421220005784,
"grad_norm": 1.1036122017363585,
"learning_rate": 1.4477663803170807e-05,
"loss": 1.6181,
"step": 1241
},
{
"epoch": 0.3590633130962706,
"grad_norm": 1.073741412650114,
"learning_rate": 1.446945789822566e-05,
"loss": 1.6449,
"step": 1242
},
{
"epoch": 0.35935241399248335,
"grad_norm": 1.1071745305977874,
"learning_rate": 1.446124823068916e-05,
"loss": 1.6455,
"step": 1243
},
{
"epoch": 0.35964151488869617,
"grad_norm": 1.1469504663679086,
"learning_rate": 1.445303480747257e-05,
"loss": 1.5989,
"step": 1244
},
{
"epoch": 0.3599306157849089,
"grad_norm": 1.1129037111462516,
"learning_rate": 1.4444817635490322e-05,
"loss": 1.6613,
"step": 1245
},
{
"epoch": 0.36021971668112174,
"grad_norm": 1.0968054047932645,
"learning_rate": 1.4436596721660002e-05,
"loss": 1.601,
"step": 1246
},
{
"epoch": 0.3605088175773345,
"grad_norm": 1.0927563331941559,
"learning_rate": 1.4428372072902344e-05,
"loss": 1.7164,
"step": 1247
},
{
"epoch": 0.36079791847354725,
"grad_norm": 1.1272073503734452,
"learning_rate": 1.4420143696141233e-05,
"loss": 1.5526,
"step": 1248
},
{
"epoch": 0.36108701936976006,
"grad_norm": 1.0615706091543697,
"learning_rate": 1.4411911598303688e-05,
"loss": 1.5344,
"step": 1249
},
{
"epoch": 0.3613761202659728,
"grad_norm": 1.0434496363748602,
"learning_rate": 1.4403675786319856e-05,
"loss": 1.5325,
"step": 1250
},
{
"epoch": 0.3616652211621856,
"grad_norm": 1.1158624940443362,
"learning_rate": 1.4395436267123017e-05,
"loss": 1.6085,
"step": 1251
},
{
"epoch": 0.3619543220583984,
"grad_norm": 1.1545246739231232,
"learning_rate": 1.4387193047649572e-05,
"loss": 1.6314,
"step": 1252
},
{
"epoch": 0.36224342295461115,
"grad_norm": 1.1500739858412437,
"learning_rate": 1.437894613483903e-05,
"loss": 1.5861,
"step": 1253
},
{
"epoch": 0.36253252385082396,
"grad_norm": 1.021310863594055,
"learning_rate": 1.4370695535634024e-05,
"loss": 1.509,
"step": 1254
},
{
"epoch": 0.3628216247470367,
"grad_norm": 1.086222036655496,
"learning_rate": 1.4362441256980271e-05,
"loss": 1.6284,
"step": 1255
},
{
"epoch": 0.3631107256432495,
"grad_norm": 1.120109032505505,
"learning_rate": 1.4354183305826601e-05,
"loss": 1.5861,
"step": 1256
},
{
"epoch": 0.3633998265394623,
"grad_norm": 1.0968623723093054,
"learning_rate": 1.4345921689124928e-05,
"loss": 1.6583,
"step": 1257
},
{
"epoch": 0.36368892743567505,
"grad_norm": 1.0757768358923314,
"learning_rate": 1.4337656413830252e-05,
"loss": 1.6732,
"step": 1258
},
{
"epoch": 0.3639780283318878,
"grad_norm": 1.1072185273450617,
"learning_rate": 1.4329387486900659e-05,
"loss": 1.5613,
"step": 1259
},
{
"epoch": 0.3642671292281006,
"grad_norm": 1.1152570699608948,
"learning_rate": 1.4321114915297298e-05,
"loss": 1.7207,
"step": 1260
},
{
"epoch": 0.3645562301243134,
"grad_norm": 1.1181703343533083,
"learning_rate": 1.4312838705984403e-05,
"loss": 1.5774,
"step": 1261
},
{
"epoch": 0.3648453310205262,
"grad_norm": 1.1234819294312621,
"learning_rate": 1.4304558865929251e-05,
"loss": 1.5807,
"step": 1262
},
{
"epoch": 0.36513443191673894,
"grad_norm": 1.1065221175707145,
"learning_rate": 1.429627540210219e-05,
"loss": 1.5762,
"step": 1263
},
{
"epoch": 0.3654235328129517,
"grad_norm": 1.061140154200443,
"learning_rate": 1.4287988321476615e-05,
"loss": 1.6927,
"step": 1264
},
{
"epoch": 0.3657126337091645,
"grad_norm": 1.0999054498119034,
"learning_rate": 1.4279697631028961e-05,
"loss": 1.5921,
"step": 1265
},
{
"epoch": 0.36600173460537727,
"grad_norm": 1.1096047939134852,
"learning_rate": 1.4271403337738706e-05,
"loss": 1.5883,
"step": 1266
},
{
"epoch": 0.36629083550159003,
"grad_norm": 1.117311000405351,
"learning_rate": 1.4263105448588362e-05,
"loss": 1.4585,
"step": 1267
},
{
"epoch": 0.36657993639780284,
"grad_norm": 1.1164044045072312,
"learning_rate": 1.4254803970563464e-05,
"loss": 1.6306,
"step": 1268
},
{
"epoch": 0.3668690372940156,
"grad_norm": 1.0628514308268988,
"learning_rate": 1.4246498910652573e-05,
"loss": 1.5824,
"step": 1269
},
{
"epoch": 0.3671581381902284,
"grad_norm": 1.181064137470451,
"learning_rate": 1.4238190275847262e-05,
"loss": 1.6634,
"step": 1270
},
{
"epoch": 0.36744723908644117,
"grad_norm": 1.0887767912574196,
"learning_rate": 1.4229878073142114e-05,
"loss": 1.5741,
"step": 1271
},
{
"epoch": 0.3677363399826539,
"grad_norm": 1.140612642718485,
"learning_rate": 1.4221562309534722e-05,
"loss": 1.6199,
"step": 1272
},
{
"epoch": 0.36802544087886674,
"grad_norm": 1.1783854536403464,
"learning_rate": 1.4213242992025662e-05,
"loss": 1.6202,
"step": 1273
},
{
"epoch": 0.3683145417750795,
"grad_norm": 1.2403828595576905,
"learning_rate": 1.4204920127618517e-05,
"loss": 1.652,
"step": 1274
},
{
"epoch": 0.3686036426712923,
"grad_norm": 1.14066458994363,
"learning_rate": 1.419659372331985e-05,
"loss": 1.7794,
"step": 1275
},
{
"epoch": 0.36889274356750507,
"grad_norm": 1.093343920812435,
"learning_rate": 1.41882637861392e-05,
"loss": 1.5567,
"step": 1276
},
{
"epoch": 0.3691818444637178,
"grad_norm": 1.0808707505493302,
"learning_rate": 1.4179930323089088e-05,
"loss": 1.5926,
"step": 1277
},
{
"epoch": 0.36947094535993064,
"grad_norm": 1.0509905823110333,
"learning_rate": 1.4171593341184997e-05,
"loss": 1.5042,
"step": 1278
},
{
"epoch": 0.3697600462561434,
"grad_norm": 1.1944099137159172,
"learning_rate": 1.4163252847445375e-05,
"loss": 1.6389,
"step": 1279
},
{
"epoch": 0.37004914715235615,
"grad_norm": 1.0550018481082393,
"learning_rate": 1.4154908848891626e-05,
"loss": 1.6389,
"step": 1280
},
{
"epoch": 0.37033824804856896,
"grad_norm": 1.144736846632199,
"learning_rate": 1.4146561352548109e-05,
"loss": 1.5658,
"step": 1281
},
{
"epoch": 0.3706273489447817,
"grad_norm": 1.1625366943570545,
"learning_rate": 1.4138210365442113e-05,
"loss": 1.6057,
"step": 1282
},
{
"epoch": 0.37091644984099453,
"grad_norm": 1.1240282635958534,
"learning_rate": 1.4129855894603885e-05,
"loss": 1.6278,
"step": 1283
},
{
"epoch": 0.3712055507372073,
"grad_norm": 1.120499316106146,
"learning_rate": 1.4121497947066593e-05,
"loss": 1.5968,
"step": 1284
},
{
"epoch": 0.37149465163342005,
"grad_norm": 1.1248421514320672,
"learning_rate": 1.4113136529866331e-05,
"loss": 1.6972,
"step": 1285
},
{
"epoch": 0.37178375252963286,
"grad_norm": 1.1229415098527773,
"learning_rate": 1.4104771650042122e-05,
"loss": 1.5947,
"step": 1286
},
{
"epoch": 0.3720728534258456,
"grad_norm": 1.0792013166290457,
"learning_rate": 1.4096403314635897e-05,
"loss": 1.5431,
"step": 1287
},
{
"epoch": 0.3723619543220584,
"grad_norm": 1.1584266878751586,
"learning_rate": 1.4088031530692497e-05,
"loss": 1.579,
"step": 1288
},
{
"epoch": 0.3726510552182712,
"grad_norm": 1.1181226193149765,
"learning_rate": 1.4079656305259673e-05,
"loss": 1.5989,
"step": 1289
},
{
"epoch": 0.37294015611448394,
"grad_norm": 1.139645588978396,
"learning_rate": 1.407127764538806e-05,
"loss": 1.5868,
"step": 1290
},
{
"epoch": 0.37322925701069676,
"grad_norm": 1.126265249898816,
"learning_rate": 1.40628955581312e-05,
"loss": 1.6933,
"step": 1291
},
{
"epoch": 0.3735183579069095,
"grad_norm": 1.0295263942219146,
"learning_rate": 1.4054510050545505e-05,
"loss": 1.587,
"step": 1292
},
{
"epoch": 0.3738074588031223,
"grad_norm": 1.1795604406769806,
"learning_rate": 1.4046121129690278e-05,
"loss": 1.6097,
"step": 1293
},
{
"epoch": 0.3740965596993351,
"grad_norm": 1.1403420688505401,
"learning_rate": 1.4037728802627693e-05,
"loss": 1.6267,
"step": 1294
},
{
"epoch": 0.37438566059554784,
"grad_norm": 1.0993080419061012,
"learning_rate": 1.4029333076422782e-05,
"loss": 1.671,
"step": 1295
},
{
"epoch": 0.3746747614917606,
"grad_norm": 1.1780757010441563,
"learning_rate": 1.4020933958143455e-05,
"loss": 1.7137,
"step": 1296
},
{
"epoch": 0.3749638623879734,
"grad_norm": 1.1243870924066819,
"learning_rate": 1.4012531454860464e-05,
"loss": 1.7021,
"step": 1297
},
{
"epoch": 0.37525296328418617,
"grad_norm": 1.0999261176344182,
"learning_rate": 1.4004125573647414e-05,
"loss": 1.5531,
"step": 1298
},
{
"epoch": 0.375542064180399,
"grad_norm": 1.1292673013365175,
"learning_rate": 1.399571632158076e-05,
"loss": 1.5297,
"step": 1299
},
{
"epoch": 0.37583116507661174,
"grad_norm": 1.0839707027340657,
"learning_rate": 1.3987303705739787e-05,
"loss": 1.6223,
"step": 1300
},
{
"epoch": 0.3761202659728245,
"grad_norm": 1.1689455340590824,
"learning_rate": 1.3978887733206614e-05,
"loss": 1.602,
"step": 1301
},
{
"epoch": 0.3764093668690373,
"grad_norm": 1.037131778360143,
"learning_rate": 1.3970468411066187e-05,
"loss": 1.5195,
"step": 1302
},
{
"epoch": 0.37669846776525007,
"grad_norm": 1.099003065082928,
"learning_rate": 1.3962045746406273e-05,
"loss": 1.5807,
"step": 1303
},
{
"epoch": 0.3769875686614628,
"grad_norm": 1.1281144891295398,
"learning_rate": 1.395361974631745e-05,
"loss": 1.6533,
"step": 1304
},
{
"epoch": 0.37727666955767564,
"grad_norm": 1.1355144650861142,
"learning_rate": 1.3945190417893106e-05,
"loss": 1.5925,
"step": 1305
},
{
"epoch": 0.3775657704538884,
"grad_norm": 1.095859210884497,
"learning_rate": 1.3936757768229428e-05,
"loss": 1.6053,
"step": 1306
},
{
"epoch": 0.3778548713501012,
"grad_norm": 1.1282184354527713,
"learning_rate": 1.39283218044254e-05,
"loss": 1.6052,
"step": 1307
},
{
"epoch": 0.37814397224631396,
"grad_norm": 1.0831491499806725,
"learning_rate": 1.3919882533582804e-05,
"loss": 1.5087,
"step": 1308
},
{
"epoch": 0.3784330731425267,
"grad_norm": 1.116233937615999,
"learning_rate": 1.3911439962806192e-05,
"loss": 1.5589,
"step": 1309
},
{
"epoch": 0.37872217403873953,
"grad_norm": 1.0633282902202397,
"learning_rate": 1.3902994099202904e-05,
"loss": 1.6164,
"step": 1310
},
{
"epoch": 0.3790112749349523,
"grad_norm": 1.0952692825870283,
"learning_rate": 1.3894544949883046e-05,
"loss": 1.7074,
"step": 1311
},
{
"epoch": 0.3793003758311651,
"grad_norm": 1.0824385592753576,
"learning_rate": 1.3886092521959497e-05,
"loss": 1.5499,
"step": 1312
},
{
"epoch": 0.37958947672737786,
"grad_norm": 1.0480969133383606,
"learning_rate": 1.3877636822547894e-05,
"loss": 1.6255,
"step": 1313
},
{
"epoch": 0.3798785776235906,
"grad_norm": 1.1345684718964264,
"learning_rate": 1.386917785876662e-05,
"loss": 1.6102,
"step": 1314
},
{
"epoch": 0.38016767851980343,
"grad_norm": 1.1080824270324598,
"learning_rate": 1.3860715637736817e-05,
"loss": 1.5263,
"step": 1315
},
{
"epoch": 0.3804567794160162,
"grad_norm": 1.059203635427344,
"learning_rate": 1.3852250166582364e-05,
"loss": 1.6089,
"step": 1316
},
{
"epoch": 0.38074588031222895,
"grad_norm": 1.087510027682475,
"learning_rate": 1.3843781452429875e-05,
"loss": 1.541,
"step": 1317
},
{
"epoch": 0.38103498120844176,
"grad_norm": 1.086247443147945,
"learning_rate": 1.3835309502408696e-05,
"loss": 1.632,
"step": 1318
},
{
"epoch": 0.3813240821046545,
"grad_norm": 1.1441906470265728,
"learning_rate": 1.3826834323650899e-05,
"loss": 1.6891,
"step": 1319
},
{
"epoch": 0.38161318300086733,
"grad_norm": 1.1166198316687277,
"learning_rate": 1.381835592329127e-05,
"loss": 1.5583,
"step": 1320
},
{
"epoch": 0.3819022838970801,
"grad_norm": 1.0406271039215775,
"learning_rate": 1.380987430846731e-05,
"loss": 1.5878,
"step": 1321
},
{
"epoch": 0.38219138479329284,
"grad_norm": 1.1782259448538297,
"learning_rate": 1.3801389486319223e-05,
"loss": 1.5041,
"step": 1322
},
{
"epoch": 0.38248048568950566,
"grad_norm": 1.119306255364395,
"learning_rate": 1.3792901463989918e-05,
"loss": 1.668,
"step": 1323
},
{
"epoch": 0.3827695865857184,
"grad_norm": 1.0695978855808004,
"learning_rate": 1.3784410248624996e-05,
"loss": 1.6487,
"step": 1324
},
{
"epoch": 0.38305868748193117,
"grad_norm": 1.0616454533037485,
"learning_rate": 1.377591584737274e-05,
"loss": 1.5401,
"step": 1325
},
{
"epoch": 0.383347788378144,
"grad_norm": 1.0774865869206818,
"learning_rate": 1.3767418267384128e-05,
"loss": 1.6066,
"step": 1326
},
{
"epoch": 0.38363688927435674,
"grad_norm": 1.060694994399504,
"learning_rate": 1.3758917515812798e-05,
"loss": 1.6073,
"step": 1327
},
{
"epoch": 0.38392599017056955,
"grad_norm": 1.0903641939801798,
"learning_rate": 1.3750413599815074e-05,
"loss": 1.6114,
"step": 1328
},
{
"epoch": 0.3842150910667823,
"grad_norm": 1.114947234018338,
"learning_rate": 1.3741906526549928e-05,
"loss": 1.6328,
"step": 1329
},
{
"epoch": 0.38450419196299507,
"grad_norm": 1.1067463745265371,
"learning_rate": 1.3733396303179005e-05,
"loss": 1.6358,
"step": 1330
},
{
"epoch": 0.3847932928592079,
"grad_norm": 1.1577228624063416,
"learning_rate": 1.3724882936866596e-05,
"loss": 1.5758,
"step": 1331
},
{
"epoch": 0.38508239375542064,
"grad_norm": 1.1558637576676867,
"learning_rate": 1.3716366434779628e-05,
"loss": 1.572,
"step": 1332
},
{
"epoch": 0.3853714946516334,
"grad_norm": 1.1143169382091267,
"learning_rate": 1.3707846804087686e-05,
"loss": 1.6113,
"step": 1333
},
{
"epoch": 0.3856605955478462,
"grad_norm": 1.130544176598161,
"learning_rate": 1.3699324051962974e-05,
"loss": 1.5409,
"step": 1334
},
{
"epoch": 0.38594969644405897,
"grad_norm": 1.0366446457013492,
"learning_rate": 1.3690798185580337e-05,
"loss": 1.5622,
"step": 1335
},
{
"epoch": 0.3862387973402718,
"grad_norm": 1.1288006310703098,
"learning_rate": 1.3682269212117223e-05,
"loss": 1.6841,
"step": 1336
},
{
"epoch": 0.38652789823648454,
"grad_norm": 1.1063560145710902,
"learning_rate": 1.3673737138753713e-05,
"loss": 1.6939,
"step": 1337
},
{
"epoch": 0.3868169991326973,
"grad_norm": 1.1459823915842076,
"learning_rate": 1.3665201972672492e-05,
"loss": 1.6378,
"step": 1338
},
{
"epoch": 0.3871061000289101,
"grad_norm": 1.1039738007856277,
"learning_rate": 1.3656663721058844e-05,
"loss": 1.5349,
"step": 1339
},
{
"epoch": 0.38739520092512286,
"grad_norm": 1.1237038119054343,
"learning_rate": 1.3648122391100663e-05,
"loss": 1.5913,
"step": 1340
},
{
"epoch": 0.3876843018213356,
"grad_norm": 1.0705587484764698,
"learning_rate": 1.3639577989988417e-05,
"loss": 1.5015,
"step": 1341
},
{
"epoch": 0.38797340271754843,
"grad_norm": 1.1425898222093245,
"learning_rate": 1.3631030524915171e-05,
"loss": 1.638,
"step": 1342
},
{
"epoch": 0.3882625036137612,
"grad_norm": 1.138358366597531,
"learning_rate": 1.362248000307657e-05,
"loss": 1.5921,
"step": 1343
},
{
"epoch": 0.388551604509974,
"grad_norm": 1.0937645241618539,
"learning_rate": 1.3613926431670824e-05,
"loss": 1.479,
"step": 1344
},
{
"epoch": 0.38884070540618676,
"grad_norm": 1.113613288350207,
"learning_rate": 1.360536981789872e-05,
"loss": 1.618,
"step": 1345
},
{
"epoch": 0.3891298063023995,
"grad_norm": 1.1367651938150571,
"learning_rate": 1.3596810168963599e-05,
"loss": 1.5512,
"step": 1346
},
{
"epoch": 0.38941890719861233,
"grad_norm": 1.079517622534508,
"learning_rate": 1.358824749207136e-05,
"loss": 1.5395,
"step": 1347
},
{
"epoch": 0.3897080080948251,
"grad_norm": 1.143875281810773,
"learning_rate": 1.3579681794430454e-05,
"loss": 1.6185,
"step": 1348
},
{
"epoch": 0.3899971089910379,
"grad_norm": 1.1732842322479438,
"learning_rate": 1.3571113083251865e-05,
"loss": 1.6675,
"step": 1349
},
{
"epoch": 0.39028620988725066,
"grad_norm": 1.1246289227681265,
"learning_rate": 1.356254136574913e-05,
"loss": 1.6558,
"step": 1350
},
{
"epoch": 0.3905753107834634,
"grad_norm": 1.165298085541357,
"learning_rate": 1.3553966649138297e-05,
"loss": 1.5468,
"step": 1351
},
{
"epoch": 0.3908644116796762,
"grad_norm": 1.1344138977255307,
"learning_rate": 1.3545388940637958e-05,
"loss": 1.5932,
"step": 1352
},
{
"epoch": 0.391153512575889,
"grad_norm": 1.0690031880578144,
"learning_rate": 1.3536808247469213e-05,
"loss": 1.5697,
"step": 1353
},
{
"epoch": 0.39144261347210174,
"grad_norm": 1.0975749907120596,
"learning_rate": 1.352822457685567e-05,
"loss": 1.6245,
"step": 1354
},
{
"epoch": 0.39173171436831455,
"grad_norm": 1.0927297214562863,
"learning_rate": 1.3519637936023462e-05,
"loss": 1.6026,
"step": 1355
},
{
"epoch": 0.3920208152645273,
"grad_norm": 1.0537160535977657,
"learning_rate": 1.3511048332201204e-05,
"loss": 1.4856,
"step": 1356
},
{
"epoch": 0.3923099161607401,
"grad_norm": 1.0920768107193122,
"learning_rate": 1.3502455772620012e-05,
"loss": 1.4882,
"step": 1357
},
{
"epoch": 0.3925990170569529,
"grad_norm": 1.0757741953402,
"learning_rate": 1.3493860264513492e-05,
"loss": 1.6938,
"step": 1358
},
{
"epoch": 0.39288811795316564,
"grad_norm": 1.1184693762965427,
"learning_rate": 1.348526181511773e-05,
"loss": 1.6371,
"step": 1359
},
{
"epoch": 0.39317721884937845,
"grad_norm": 1.0887972408619184,
"learning_rate": 1.3476660431671288e-05,
"loss": 1.5472,
"step": 1360
},
{
"epoch": 0.3934663197455912,
"grad_norm": 1.1259522274979648,
"learning_rate": 1.34680561214152e-05,
"loss": 1.5498,
"step": 1361
},
{
"epoch": 0.39375542064180397,
"grad_norm": 1.1133474449428702,
"learning_rate": 1.3459448891592962e-05,
"loss": 1.5723,
"step": 1362
},
{
"epoch": 0.3940445215380168,
"grad_norm": 1.1499692550396208,
"learning_rate": 1.3450838749450529e-05,
"loss": 1.484,
"step": 1363
},
{
"epoch": 0.39433362243422954,
"grad_norm": 1.0715825700296768,
"learning_rate": 1.3442225702236307e-05,
"loss": 1.5612,
"step": 1364
},
{
"epoch": 0.39462272333044235,
"grad_norm": 1.077029585331996,
"learning_rate": 1.3433609757201147e-05,
"loss": 1.5889,
"step": 1365
},
{
"epoch": 0.3949118242266551,
"grad_norm": 1.1501089531692315,
"learning_rate": 1.342499092159834e-05,
"loss": 1.4946,
"step": 1366
},
{
"epoch": 0.39520092512286786,
"grad_norm": 1.096740007082763,
"learning_rate": 1.3416369202683615e-05,
"loss": 1.5497,
"step": 1367
},
{
"epoch": 0.3954900260190807,
"grad_norm": 1.0987454926481957,
"learning_rate": 1.3407744607715119e-05,
"loss": 1.5969,
"step": 1368
},
{
"epoch": 0.39577912691529343,
"grad_norm": 1.060961357189651,
"learning_rate": 1.3399117143953426e-05,
"loss": 1.5441,
"step": 1369
},
{
"epoch": 0.3960682278115062,
"grad_norm": 1.1032994325908025,
"learning_rate": 1.3390486818661526e-05,
"loss": 1.6306,
"step": 1370
},
{
"epoch": 0.396357328707719,
"grad_norm": 1.1047680776182227,
"learning_rate": 1.3381853639104817e-05,
"loss": 1.5242,
"step": 1371
},
{
"epoch": 0.39664642960393176,
"grad_norm": 1.1022023946061226,
"learning_rate": 1.33732176125511e-05,
"loss": 1.6319,
"step": 1372
},
{
"epoch": 0.3969355305001446,
"grad_norm": 1.0789182866424254,
"learning_rate": 1.3364578746270564e-05,
"loss": 1.6542,
"step": 1373
},
{
"epoch": 0.39722463139635733,
"grad_norm": 1.13836495763387,
"learning_rate": 1.3355937047535806e-05,
"loss": 1.5158,
"step": 1374
},
{
"epoch": 0.3975137322925701,
"grad_norm": 1.1151693144482542,
"learning_rate": 1.3347292523621795e-05,
"loss": 1.6513,
"step": 1375
},
{
"epoch": 0.3978028331887829,
"grad_norm": 1.0532473348160287,
"learning_rate": 1.3338645181805876e-05,
"loss": 1.5503,
"step": 1376
},
{
"epoch": 0.39809193408499566,
"grad_norm": 1.119386196223983,
"learning_rate": 1.3329995029367782e-05,
"loss": 1.5879,
"step": 1377
},
{
"epoch": 0.3983810349812084,
"grad_norm": 1.0973825255595782,
"learning_rate": 1.3321342073589595e-05,
"loss": 1.5115,
"step": 1378
},
{
"epoch": 0.39867013587742123,
"grad_norm": 1.0693127903553905,
"learning_rate": 1.331268632175576e-05,
"loss": 1.6588,
"step": 1379
},
{
"epoch": 0.398959236773634,
"grad_norm": 1.0823638073171304,
"learning_rate": 1.330402778115309e-05,
"loss": 1.589,
"step": 1380
},
{
"epoch": 0.3992483376698468,
"grad_norm": 1.0805775803416322,
"learning_rate": 1.3295366459070724e-05,
"loss": 1.5052,
"step": 1381
},
{
"epoch": 0.39953743856605956,
"grad_norm": 1.0443203230920821,
"learning_rate": 1.328670236280016e-05,
"loss": 1.4888,
"step": 1382
},
{
"epoch": 0.3998265394622723,
"grad_norm": 1.1511681114820926,
"learning_rate": 1.3278035499635226e-05,
"loss": 1.6123,
"step": 1383
},
{
"epoch": 0.4001156403584851,
"grad_norm": 1.0863214241630808,
"learning_rate": 1.3269365876872078e-05,
"loss": 1.5493,
"step": 1384
},
{
"epoch": 0.4004047412546979,
"grad_norm": 1.0838040505052533,
"learning_rate": 1.3260693501809193e-05,
"loss": 1.6153,
"step": 1385
},
{
"epoch": 0.4006938421509107,
"grad_norm": 1.1500580767514397,
"learning_rate": 1.3252018381747367e-05,
"loss": 1.5897,
"step": 1386
},
{
"epoch": 0.40098294304712345,
"grad_norm": 1.0845655171544064,
"learning_rate": 1.3243340523989714e-05,
"loss": 1.5783,
"step": 1387
},
{
"epoch": 0.4012720439433362,
"grad_norm": 1.1970928864429935,
"learning_rate": 1.3234659935841641e-05,
"loss": 1.6129,
"step": 1388
},
{
"epoch": 0.401561144839549,
"grad_norm": 1.101894564995156,
"learning_rate": 1.3225976624610856e-05,
"loss": 1.5264,
"step": 1389
},
{
"epoch": 0.4018502457357618,
"grad_norm": 1.1047369726724903,
"learning_rate": 1.3217290597607373e-05,
"loss": 1.6217,
"step": 1390
},
{
"epoch": 0.40213934663197454,
"grad_norm": 1.1482189531968385,
"learning_rate": 1.3208601862143468e-05,
"loss": 1.6808,
"step": 1391
},
{
"epoch": 0.40242844752818735,
"grad_norm": 1.1210248814188173,
"learning_rate": 1.3199910425533721e-05,
"loss": 1.5766,
"step": 1392
},
{
"epoch": 0.4027175484244001,
"grad_norm": 1.1685854469228043,
"learning_rate": 1.3191216295094971e-05,
"loss": 1.4749,
"step": 1393
},
{
"epoch": 0.4030066493206129,
"grad_norm": 1.0797246071856748,
"learning_rate": 1.318251947814633e-05,
"loss": 1.6377,
"step": 1394
},
{
"epoch": 0.4032957502168257,
"grad_norm": 1.0635773691483372,
"learning_rate": 1.3173819982009173e-05,
"loss": 1.4949,
"step": 1395
},
{
"epoch": 0.40358485111303843,
"grad_norm": 1.115897033244827,
"learning_rate": 1.3165117814007124e-05,
"loss": 1.6583,
"step": 1396
},
{
"epoch": 0.40387395200925125,
"grad_norm": 1.0989308444609356,
"learning_rate": 1.3156412981466064e-05,
"loss": 1.649,
"step": 1397
},
{
"epoch": 0.404163052905464,
"grad_norm": 1.0797697429688853,
"learning_rate": 1.3147705491714113e-05,
"loss": 1.5827,
"step": 1398
},
{
"epoch": 0.40445215380167676,
"grad_norm": 1.0834261852077045,
"learning_rate": 1.3138995352081634e-05,
"loss": 1.5503,
"step": 1399
},
{
"epoch": 0.4047412546978896,
"grad_norm": 1.0787814057307044,
"learning_rate": 1.3130282569901205e-05,
"loss": 1.5232,
"step": 1400
}
],
"logging_steps": 1,
"max_steps": 3459,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 700,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 146565758976000.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}