{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.9998572957545488,
"eval_steps": 500,
"global_step": 7007,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0028540849090260435,
"grad_norm": 7.177610874176025,
"learning_rate": 5.707762557077626e-07,
"loss": 5.7328,
"step": 10
},
{
"epoch": 0.005708169818052087,
"grad_norm": 7.953944206237793,
"learning_rate": 1.1415525114155251e-06,
"loss": 5.4553,
"step": 20
},
{
"epoch": 0.00856225472707813,
"grad_norm": 7.924602031707764,
"learning_rate": 1.7123287671232877e-06,
"loss": 5.6662,
"step": 30
},
{
"epoch": 0.011416339636104174,
"grad_norm": 8.706378936767578,
"learning_rate": 2.2831050228310503e-06,
"loss": 5.503,
"step": 40
},
{
"epoch": 0.014270424545130217,
"grad_norm": 8.828363418579102,
"learning_rate": 2.853881278538813e-06,
"loss": 5.2946,
"step": 50
},
{
"epoch": 0.01712450945415626,
"grad_norm": 11.124424934387207,
"learning_rate": 3.4246575342465754e-06,
"loss": 4.9995,
"step": 60
},
{
"epoch": 0.019978594363182306,
"grad_norm": 16.35590934753418,
"learning_rate": 3.995433789954338e-06,
"loss": 4.4733,
"step": 70
},
{
"epoch": 0.022832679272208348,
"grad_norm": 9.050875663757324,
"learning_rate": 4.566210045662101e-06,
"loss": 3.389,
"step": 80
},
{
"epoch": 0.025686764181234393,
"grad_norm": 10.061305046081543,
"learning_rate": 5.136986301369863e-06,
"loss": 2.475,
"step": 90
},
{
"epoch": 0.028540849090260435,
"grad_norm": 4.466187000274658,
"learning_rate": 5.707762557077626e-06,
"loss": 1.4959,
"step": 100
},
{
"epoch": 0.03139493399928648,
"grad_norm": 3.469721794128418,
"learning_rate": 6.278538812785388e-06,
"loss": 1.1382,
"step": 110
},
{
"epoch": 0.03424901890831252,
"grad_norm": 5.091841220855713,
"learning_rate": 6.849315068493151e-06,
"loss": 0.8312,
"step": 120
},
{
"epoch": 0.03710310381733856,
"grad_norm": 2.3277525901794434,
"learning_rate": 7.4200913242009134e-06,
"loss": 0.7045,
"step": 130
},
{
"epoch": 0.03995718872636461,
"grad_norm": 2.6082115173339844,
"learning_rate": 7.990867579908676e-06,
"loss": 0.7053,
"step": 140
},
{
"epoch": 0.042811273635390654,
"grad_norm": 2.5840566158294678,
"learning_rate": 8.561643835616438e-06,
"loss": 0.5612,
"step": 150
},
{
"epoch": 0.045665358544416695,
"grad_norm": 1.6896095275878906,
"learning_rate": 9.132420091324201e-06,
"loss": 0.536,
"step": 160
},
{
"epoch": 0.04851944345344274,
"grad_norm": 4.644432067871094,
"learning_rate": 9.703196347031963e-06,
"loss": 0.4792,
"step": 170
},
{
"epoch": 0.051373528362468786,
"grad_norm": 2.0224084854125977,
"learning_rate": 1.0273972602739726e-05,
"loss": 0.4958,
"step": 180
},
{
"epoch": 0.05422761327149483,
"grad_norm": 2.7462992668151855,
"learning_rate": 1.0844748858447488e-05,
"loss": 0.5412,
"step": 190
},
{
"epoch": 0.05708169818052087,
"grad_norm": 3.044191598892212,
"learning_rate": 1.1415525114155251e-05,
"loss": 0.5006,
"step": 200
},
{
"epoch": 0.05993578308954691,
"grad_norm": 2.856959819793701,
"learning_rate": 1.1986301369863013e-05,
"loss": 0.4759,
"step": 210
},
{
"epoch": 0.06278986799857296,
"grad_norm": 1.736061453819275,
"learning_rate": 1.2557077625570777e-05,
"loss": 0.4691,
"step": 220
},
{
"epoch": 0.065643952907599,
"grad_norm": 1.445332646369934,
"learning_rate": 1.312785388127854e-05,
"loss": 0.4628,
"step": 230
},
{
"epoch": 0.06849803781662504,
"grad_norm": 2.229682683944702,
"learning_rate": 1.3698630136986302e-05,
"loss": 0.4981,
"step": 240
},
{
"epoch": 0.07135212272565108,
"grad_norm": 4.1906232833862305,
"learning_rate": 1.4269406392694065e-05,
"loss": 0.4352,
"step": 250
},
{
"epoch": 0.07420620763467713,
"grad_norm": 3.204361915588379,
"learning_rate": 1.4840182648401827e-05,
"loss": 0.4872,
"step": 260
},
{
"epoch": 0.07706029254370317,
"grad_norm": 2.531733989715576,
"learning_rate": 1.541095890410959e-05,
"loss": 0.4652,
"step": 270
},
{
"epoch": 0.07991437745272922,
"grad_norm": 2.0460851192474365,
"learning_rate": 1.5981735159817352e-05,
"loss": 0.5005,
"step": 280
},
{
"epoch": 0.08276846236175527,
"grad_norm": 3.3922574520111084,
"learning_rate": 1.6552511415525115e-05,
"loss": 0.4896,
"step": 290
},
{
"epoch": 0.08562254727078131,
"grad_norm": 2.1291298866271973,
"learning_rate": 1.7123287671232875e-05,
"loss": 0.4396,
"step": 300
},
{
"epoch": 0.08847663217980735,
"grad_norm": 3.9003682136535645,
"learning_rate": 1.769406392694064e-05,
"loss": 0.467,
"step": 310
},
{
"epoch": 0.09133071708883339,
"grad_norm": 3.01649808883667,
"learning_rate": 1.8264840182648402e-05,
"loss": 0.4752,
"step": 320
},
{
"epoch": 0.09418480199785943,
"grad_norm": 1.4664050340652466,
"learning_rate": 1.8835616438356166e-05,
"loss": 0.4519,
"step": 330
},
{
"epoch": 0.09703888690688547,
"grad_norm": 1.8165019750595093,
"learning_rate": 1.9406392694063926e-05,
"loss": 0.4778,
"step": 340
},
{
"epoch": 0.09989297181591153,
"grad_norm": 1.9969810247421265,
"learning_rate": 1.997716894977169e-05,
"loss": 0.446,
"step": 350
},
{
"epoch": 0.10274705672493757,
"grad_norm": 1.4795584678649902,
"learning_rate": 2.0547945205479453e-05,
"loss": 0.4847,
"step": 360
},
{
"epoch": 0.10560114163396361,
"grad_norm": 1.3624811172485352,
"learning_rate": 2.1118721461187216e-05,
"loss": 0.4902,
"step": 370
},
{
"epoch": 0.10845522654298966,
"grad_norm": 2.039226770401001,
"learning_rate": 2.1689497716894976e-05,
"loss": 0.4595,
"step": 380
},
{
"epoch": 0.1113093114520157,
"grad_norm": 1.597818374633789,
"learning_rate": 2.226027397260274e-05,
"loss": 0.4481,
"step": 390
},
{
"epoch": 0.11416339636104174,
"grad_norm": 1.184199571609497,
"learning_rate": 2.2831050228310503e-05,
"loss": 0.4659,
"step": 400
},
{
"epoch": 0.11701748127006778,
"grad_norm": 2.6335010528564453,
"learning_rate": 2.3401826484018266e-05,
"loss": 0.4932,
"step": 410
},
{
"epoch": 0.11987156617909382,
"grad_norm": 1.4489330053329468,
"learning_rate": 2.3972602739726026e-05,
"loss": 0.4646,
"step": 420
},
{
"epoch": 0.12272565108811988,
"grad_norm": 2.0063445568084717,
"learning_rate": 2.454337899543379e-05,
"loss": 0.4544,
"step": 430
},
{
"epoch": 0.12557973599714592,
"grad_norm": 2.1495394706726074,
"learning_rate": 2.5114155251141553e-05,
"loss": 0.4479,
"step": 440
},
{
"epoch": 0.12843382090617195,
"grad_norm": 1.3138567209243774,
"learning_rate": 2.568493150684932e-05,
"loss": 0.4727,
"step": 450
},
{
"epoch": 0.131287905815198,
"grad_norm": 1.474777340888977,
"learning_rate": 2.625570776255708e-05,
"loss": 0.4613,
"step": 460
},
{
"epoch": 0.13414199072422406,
"grad_norm": 2.4200029373168945,
"learning_rate": 2.682648401826484e-05,
"loss": 0.4794,
"step": 470
},
{
"epoch": 0.1369960756332501,
"grad_norm": 1.476733922958374,
"learning_rate": 2.7397260273972603e-05,
"loss": 0.4764,
"step": 480
},
{
"epoch": 0.13985016054227614,
"grad_norm": 4.004658222198486,
"learning_rate": 2.796803652968037e-05,
"loss": 0.4796,
"step": 490
},
{
"epoch": 0.14270424545130217,
"grad_norm": 1.8049726486206055,
"learning_rate": 2.853881278538813e-05,
"loss": 0.4883,
"step": 500
},
{
"epoch": 0.14555833036032823,
"grad_norm": 1.7194474935531616,
"learning_rate": 2.910958904109589e-05,
"loss": 0.4749,
"step": 510
},
{
"epoch": 0.14841241526935425,
"grad_norm": 1.2992069721221924,
"learning_rate": 2.9680365296803654e-05,
"loss": 0.448,
"step": 520
},
{
"epoch": 0.1512665001783803,
"grad_norm": 1.0582475662231445,
"learning_rate": 3.025114155251142e-05,
"loss": 0.4781,
"step": 530
},
{
"epoch": 0.15412058508740634,
"grad_norm": 2.1239373683929443,
"learning_rate": 3.082191780821918e-05,
"loss": 0.5293,
"step": 540
},
{
"epoch": 0.1569746699964324,
"grad_norm": 1.3766371011734009,
"learning_rate": 3.1392694063926944e-05,
"loss": 0.4626,
"step": 550
},
{
"epoch": 0.15982875490545845,
"grad_norm": 1.2140746116638184,
"learning_rate": 3.1963470319634704e-05,
"loss": 0.4471,
"step": 560
},
{
"epoch": 0.16268283981448448,
"grad_norm": 1.7624636888504028,
"learning_rate": 3.253424657534247e-05,
"loss": 0.4691,
"step": 570
},
{
"epoch": 0.16553692472351053,
"grad_norm": 3.22637939453125,
"learning_rate": 3.310502283105023e-05,
"loss": 0.5514,
"step": 580
},
{
"epoch": 0.16839100963253656,
"grad_norm": 3.3661112785339355,
"learning_rate": 3.367579908675799e-05,
"loss": 0.5277,
"step": 590
},
{
"epoch": 0.17124509454156261,
"grad_norm": 2.009028673171997,
"learning_rate": 3.424657534246575e-05,
"loss": 0.4961,
"step": 600
},
{
"epoch": 0.17409917945058864,
"grad_norm": 1.145951271057129,
"learning_rate": 3.481735159817352e-05,
"loss": 0.484,
"step": 610
},
{
"epoch": 0.1769532643596147,
"grad_norm": 1.4422398805618286,
"learning_rate": 3.538812785388128e-05,
"loss": 0.4913,
"step": 620
},
{
"epoch": 0.17980734926864075,
"grad_norm": 1.5047334432601929,
"learning_rate": 3.5958904109589045e-05,
"loss": 0.468,
"step": 630
},
{
"epoch": 0.18266143417766678,
"grad_norm": 1.358590841293335,
"learning_rate": 3.6529680365296805e-05,
"loss": 0.4549,
"step": 640
},
{
"epoch": 0.18551551908669284,
"grad_norm": 1.2067798376083374,
"learning_rate": 3.710045662100457e-05,
"loss": 0.4726,
"step": 650
},
{
"epoch": 0.18836960399571887,
"grad_norm": 1.3069053888320923,
"learning_rate": 3.767123287671233e-05,
"loss": 0.4998,
"step": 660
},
{
"epoch": 0.19122368890474492,
"grad_norm": 1.4003655910491943,
"learning_rate": 3.824200913242009e-05,
"loss": 0.4396,
"step": 670
},
{
"epoch": 0.19407777381377095,
"grad_norm": 1.6738018989562988,
"learning_rate": 3.881278538812785e-05,
"loss": 0.4782,
"step": 680
},
{
"epoch": 0.196931858722797,
"grad_norm": 1.3846429586410522,
"learning_rate": 3.938356164383562e-05,
"loss": 0.4873,
"step": 690
},
{
"epoch": 0.19978594363182306,
"grad_norm": 1.5841200351715088,
"learning_rate": 3.995433789954338e-05,
"loss": 0.4568,
"step": 700
},
{
"epoch": 0.2026400285408491,
"grad_norm": 1.7131880521774292,
"learning_rate": 4.0525114155251145e-05,
"loss": 0.5105,
"step": 710
},
{
"epoch": 0.20549411344987514,
"grad_norm": 0.7919635772705078,
"learning_rate": 4.1095890410958905e-05,
"loss": 0.4417,
"step": 720
},
{
"epoch": 0.20834819835890117,
"grad_norm": 1.2708427906036377,
"learning_rate": 4.166666666666667e-05,
"loss": 0.454,
"step": 730
},
{
"epoch": 0.21120228326792723,
"grad_norm": 1.4845744371414185,
"learning_rate": 4.223744292237443e-05,
"loss": 0.4498,
"step": 740
},
{
"epoch": 0.21405636817695325,
"grad_norm": 1.4763469696044922,
"learning_rate": 4.280821917808219e-05,
"loss": 0.4724,
"step": 750
},
{
"epoch": 0.2169104530859793,
"grad_norm": 1.6870049238204956,
"learning_rate": 4.337899543378995e-05,
"loss": 0.4885,
"step": 760
},
{
"epoch": 0.21976453799500534,
"grad_norm": 0.9296655058860779,
"learning_rate": 4.394977168949772e-05,
"loss": 0.4669,
"step": 770
},
{
"epoch": 0.2226186229040314,
"grad_norm": 0.876143753528595,
"learning_rate": 4.452054794520548e-05,
"loss": 0.4569,
"step": 780
},
{
"epoch": 0.22547270781305745,
"grad_norm": 0.651347815990448,
"learning_rate": 4.5091324200913246e-05,
"loss": 0.4307,
"step": 790
},
{
"epoch": 0.22832679272208348,
"grad_norm": 0.7317978739738464,
"learning_rate": 4.5662100456621006e-05,
"loss": 0.4713,
"step": 800
},
{
"epoch": 0.23118087763110953,
"grad_norm": 1.1030404567718506,
"learning_rate": 4.623287671232877e-05,
"loss": 0.4387,
"step": 810
},
{
"epoch": 0.23403496254013556,
"grad_norm": 1.3097269535064697,
"learning_rate": 4.680365296803653e-05,
"loss": 0.4327,
"step": 820
},
{
"epoch": 0.23688904744916162,
"grad_norm": 1.6860710382461548,
"learning_rate": 4.737442922374429e-05,
"loss": 0.4658,
"step": 830
},
{
"epoch": 0.23974313235818764,
"grad_norm": 0.9696588516235352,
"learning_rate": 4.794520547945205e-05,
"loss": 0.485,
"step": 840
},
{
"epoch": 0.2425972172672137,
"grad_norm": 1.1960479021072388,
"learning_rate": 4.851598173515982e-05,
"loss": 0.454,
"step": 850
},
{
"epoch": 0.24545130217623976,
"grad_norm": 1.4926533699035645,
"learning_rate": 4.908675799086758e-05,
"loss": 0.5058,
"step": 860
},
{
"epoch": 0.24830538708526578,
"grad_norm": 0.5784097909927368,
"learning_rate": 4.9657534246575346e-05,
"loss": 0.4072,
"step": 870
},
{
"epoch": 0.25115947199429184,
"grad_norm": 1.0179357528686523,
"learning_rate": 5.0228310502283106e-05,
"loss": 0.4462,
"step": 880
},
{
"epoch": 0.2540135569033179,
"grad_norm": 0.9068560600280762,
"learning_rate": 5.0799086757990866e-05,
"loss": 0.4674,
"step": 890
},
{
"epoch": 0.2568676418123439,
"grad_norm": 1.4015111923217773,
"learning_rate": 5.136986301369864e-05,
"loss": 0.4528,
"step": 900
},
{
"epoch": 0.25972172672136995,
"grad_norm": 0.6935715675354004,
"learning_rate": 5.19406392694064e-05,
"loss": 0.4841,
"step": 910
},
{
"epoch": 0.262575811630396,
"grad_norm": 1.1978791952133179,
"learning_rate": 5.251141552511416e-05,
"loss": 0.4707,
"step": 920
},
{
"epoch": 0.26542989653942206,
"grad_norm": 0.9195595979690552,
"learning_rate": 5.308219178082192e-05,
"loss": 0.4538,
"step": 930
},
{
"epoch": 0.2682839814484481,
"grad_norm": 1.5978795289993286,
"learning_rate": 5.365296803652968e-05,
"loss": 0.4536,
"step": 940
},
{
"epoch": 0.2711380663574741,
"grad_norm": 1.3926838636398315,
"learning_rate": 5.422374429223745e-05,
"loss": 0.49,
"step": 950
},
{
"epoch": 0.2739921512665002,
"grad_norm": 0.575034499168396,
"learning_rate": 5.479452054794521e-05,
"loss": 0.5136,
"step": 960
},
{
"epoch": 0.27684623617552623,
"grad_norm": 0.6068254709243774,
"learning_rate": 5.536529680365297e-05,
"loss": 0.4552,
"step": 970
},
{
"epoch": 0.2797003210845523,
"grad_norm": 0.7864544987678528,
"learning_rate": 5.593607305936074e-05,
"loss": 0.492,
"step": 980
},
{
"epoch": 0.2825544059935783,
"grad_norm": 1.3433411121368408,
"learning_rate": 5.65068493150685e-05,
"loss": 0.4795,
"step": 990
},
{
"epoch": 0.28540849090260434,
"grad_norm": 0.7735553979873657,
"learning_rate": 5.707762557077626e-05,
"loss": 0.4538,
"step": 1000
},
{
"epoch": 0.2882625758116304,
"grad_norm": 0.6869261264801025,
"learning_rate": 5.764840182648402e-05,
"loss": 0.4821,
"step": 1010
},
{
"epoch": 0.29111666072065645,
"grad_norm": 1.081992506980896,
"learning_rate": 5.821917808219178e-05,
"loss": 0.4669,
"step": 1020
},
{
"epoch": 0.2939707456296825,
"grad_norm": 0.8736602663993835,
"learning_rate": 5.878995433789955e-05,
"loss": 0.4253,
"step": 1030
},
{
"epoch": 0.2968248305387085,
"grad_norm": 1.1186368465423584,
"learning_rate": 5.936073059360731e-05,
"loss": 0.469,
"step": 1040
},
{
"epoch": 0.29967891544773456,
"grad_norm": 0.6147318482398987,
"learning_rate": 5.993150684931507e-05,
"loss": 0.4573,
"step": 1050
},
{
"epoch": 0.3025330003567606,
"grad_norm": 1.0282665491104126,
"learning_rate": 6.050228310502284e-05,
"loss": 0.4448,
"step": 1060
},
{
"epoch": 0.3053870852657867,
"grad_norm": 1.3858377933502197,
"learning_rate": 6.10730593607306e-05,
"loss": 0.4453,
"step": 1070
},
{
"epoch": 0.3082411701748127,
"grad_norm": 0.9879347085952759,
"learning_rate": 6.164383561643835e-05,
"loss": 0.4441,
"step": 1080
},
{
"epoch": 0.31109525508383873,
"grad_norm": 0.7240331172943115,
"learning_rate": 6.221461187214613e-05,
"loss": 0.4408,
"step": 1090
},
{
"epoch": 0.3139493399928648,
"grad_norm": 0.7207432389259338,
"learning_rate": 6.278538812785389e-05,
"loss": 0.4753,
"step": 1100
},
{
"epoch": 0.31680342490189084,
"grad_norm": 1.230485439300537,
"learning_rate": 6.335616438356165e-05,
"loss": 0.4815,
"step": 1110
},
{
"epoch": 0.3196575098109169,
"grad_norm": 0.7309743762016296,
"learning_rate": 6.392694063926941e-05,
"loss": 0.424,
"step": 1120
},
{
"epoch": 0.3225115947199429,
"grad_norm": 0.919762134552002,
"learning_rate": 6.449771689497717e-05,
"loss": 0.4576,
"step": 1130
},
{
"epoch": 0.32536567962896895,
"grad_norm": 0.6879755258560181,
"learning_rate": 6.506849315068494e-05,
"loss": 0.4349,
"step": 1140
},
{
"epoch": 0.328219764537995,
"grad_norm": 0.8196412920951843,
"learning_rate": 6.56392694063927e-05,
"loss": 0.4955,
"step": 1150
},
{
"epoch": 0.33107384944702106,
"grad_norm": 1.016493558883667,
"learning_rate": 6.621004566210046e-05,
"loss": 0.4656,
"step": 1160
},
{
"epoch": 0.3339279343560471,
"grad_norm": 0.560720682144165,
"learning_rate": 6.678082191780822e-05,
"loss": 0.4251,
"step": 1170
},
{
"epoch": 0.3367820192650731,
"grad_norm": 0.5806992053985596,
"learning_rate": 6.735159817351598e-05,
"loss": 0.4293,
"step": 1180
},
{
"epoch": 0.3396361041740992,
"grad_norm": 0.9094499349594116,
"learning_rate": 6.792237442922374e-05,
"loss": 0.4143,
"step": 1190
},
{
"epoch": 0.34249018908312523,
"grad_norm": 0.5154547691345215,
"learning_rate": 6.84931506849315e-05,
"loss": 0.4682,
"step": 1200
},
{
"epoch": 0.3453442739921513,
"grad_norm": 0.7153740525245667,
"learning_rate": 6.906392694063926e-05,
"loss": 0.4717,
"step": 1210
},
{
"epoch": 0.3481983589011773,
"grad_norm": 0.6453626155853271,
"learning_rate": 6.963470319634704e-05,
"loss": 0.4609,
"step": 1220
},
{
"epoch": 0.35105244381020334,
"grad_norm": 0.6944445371627808,
"learning_rate": 7.02054794520548e-05,
"loss": 0.4446,
"step": 1230
},
{
"epoch": 0.3539065287192294,
"grad_norm": 0.5920615196228027,
"learning_rate": 7.077625570776256e-05,
"loss": 0.4863,
"step": 1240
},
{
"epoch": 0.35676061362825545,
"grad_norm": 0.8108448386192322,
"learning_rate": 7.134703196347033e-05,
"loss": 0.4713,
"step": 1250
},
{
"epoch": 0.3596146985372815,
"grad_norm": 0.6366176605224609,
"learning_rate": 7.191780821917809e-05,
"loss": 0.4338,
"step": 1260
},
{
"epoch": 0.3624687834463075,
"grad_norm": 0.9244728088378906,
"learning_rate": 7.248858447488585e-05,
"loss": 0.4319,
"step": 1270
},
{
"epoch": 0.36532286835533356,
"grad_norm": 0.7847846150398254,
"learning_rate": 7.305936073059361e-05,
"loss": 0.4692,
"step": 1280
},
{
"epoch": 0.3681769532643596,
"grad_norm": 0.9149333238601685,
"learning_rate": 7.363013698630137e-05,
"loss": 0.4677,
"step": 1290
},
{
"epoch": 0.3710310381733857,
"grad_norm": 0.7170394659042358,
"learning_rate": 7.420091324200914e-05,
"loss": 0.4261,
"step": 1300
},
{
"epoch": 0.3738851230824117,
"grad_norm": 1.0459473133087158,
"learning_rate": 7.47716894977169e-05,
"loss": 0.4709,
"step": 1310
},
{
"epoch": 0.37673920799143773,
"grad_norm": 1.1265268325805664,
"learning_rate": 7.534246575342466e-05,
"loss": 0.4799,
"step": 1320
},
{
"epoch": 0.3795932929004638,
"grad_norm": 0.7591957449913025,
"learning_rate": 7.591324200913242e-05,
"loss": 0.4438,
"step": 1330
},
{
"epoch": 0.38244737780948984,
"grad_norm": 0.5419506430625916,
"learning_rate": 7.648401826484018e-05,
"loss": 0.4578,
"step": 1340
},
{
"epoch": 0.3853014627185159,
"grad_norm": 0.5713040828704834,
"learning_rate": 7.705479452054794e-05,
"loss": 0.4633,
"step": 1350
},
{
"epoch": 0.3881555476275419,
"grad_norm": 0.9401557445526123,
"learning_rate": 7.76255707762557e-05,
"loss": 0.4517,
"step": 1360
},
{
"epoch": 0.39100963253656795,
"grad_norm": 0.9244952201843262,
"learning_rate": 7.819634703196348e-05,
"loss": 0.4429,
"step": 1370
},
{
"epoch": 0.393863717445594,
"grad_norm": 0.6746932864189148,
"learning_rate": 7.876712328767124e-05,
"loss": 0.4595,
"step": 1380
},
{
"epoch": 0.39671780235462006,
"grad_norm": 0.7216023206710815,
"learning_rate": 7.9337899543379e-05,
"loss": 0.4364,
"step": 1390
},
{
"epoch": 0.3995718872636461,
"grad_norm": 0.8057281374931335,
"learning_rate": 7.990867579908676e-05,
"loss": 0.4532,
"step": 1400
},
{
"epoch": 0.4024259721726721,
"grad_norm": 0.5679146647453308,
"learning_rate": 8.047945205479453e-05,
"loss": 0.42,
"step": 1410
},
{
"epoch": 0.4052800570816982,
"grad_norm": 0.45307865738868713,
"learning_rate": 8.105022831050229e-05,
"loss": 0.4454,
"step": 1420
},
{
"epoch": 0.40813414199072423,
"grad_norm": 0.559577226638794,
"learning_rate": 8.162100456621005e-05,
"loss": 0.4723,
"step": 1430
},
{
"epoch": 0.4109882268997503,
"grad_norm": 1.2319309711456299,
"learning_rate": 8.219178082191781e-05,
"loss": 0.4659,
"step": 1440
},
{
"epoch": 0.4138423118087763,
"grad_norm": 0.6527321338653564,
"learning_rate": 8.276255707762558e-05,
"loss": 0.4235,
"step": 1450
},
{
"epoch": 0.41669639671780234,
"grad_norm": 0.6203667521476746,
"learning_rate": 8.333333333333334e-05,
"loss": 0.4565,
"step": 1460
},
{
"epoch": 0.4195504816268284,
"grad_norm": 0.9035118818283081,
"learning_rate": 8.39041095890411e-05,
"loss": 0.4528,
"step": 1470
},
{
"epoch": 0.42240456653585445,
"grad_norm": 1.0064053535461426,
"learning_rate": 8.447488584474886e-05,
"loss": 0.4358,
"step": 1480
},
{
"epoch": 0.4252586514448805,
"grad_norm": 0.7333278656005859,
"learning_rate": 8.504566210045662e-05,
"loss": 0.4523,
"step": 1490
},
{
"epoch": 0.4281127363539065,
"grad_norm": 0.7744036912918091,
"learning_rate": 8.561643835616438e-05,
"loss": 0.4208,
"step": 1500
},
{
"epoch": 0.43096682126293256,
"grad_norm": 1.0139415264129639,
"learning_rate": 8.618721461187214e-05,
"loss": 0.4566,
"step": 1510
},
{
"epoch": 0.4338209061719586,
"grad_norm": 0.83629310131073,
"learning_rate": 8.67579908675799e-05,
"loss": 0.4434,
"step": 1520
},
{
"epoch": 0.4366749910809847,
"grad_norm": 0.7974062561988831,
"learning_rate": 8.732876712328768e-05,
"loss": 0.4562,
"step": 1530
},
{
"epoch": 0.4395290759900107,
"grad_norm": 0.6136748194694519,
"learning_rate": 8.789954337899544e-05,
"loss": 0.4556,
"step": 1540
},
{
"epoch": 0.44238316089903673,
"grad_norm": 0.6411725282669067,
"learning_rate": 8.84703196347032e-05,
"loss": 0.4601,
"step": 1550
},
{
"epoch": 0.4452372458080628,
"grad_norm": 0.9157238006591797,
"learning_rate": 8.904109589041096e-05,
"loss": 0.4473,
"step": 1560
},
{
"epoch": 0.44809133071708884,
"grad_norm": 0.5935428142547607,
"learning_rate": 8.961187214611873e-05,
"loss": 0.4374,
"step": 1570
},
{
"epoch": 0.4509454156261149,
"grad_norm": 0.6518482565879822,
"learning_rate": 9.018264840182649e-05,
"loss": 0.4898,
"step": 1580
},
{
"epoch": 0.4537995005351409,
"grad_norm": 0.990638792514801,
"learning_rate": 9.075342465753425e-05,
"loss": 0.4623,
"step": 1590
},
{
"epoch": 0.45665358544416695,
"grad_norm": 0.7768042683601379,
"learning_rate": 9.132420091324201e-05,
"loss": 0.4203,
"step": 1600
},
{
"epoch": 0.459507670353193,
"grad_norm": 0.49145108461380005,
"learning_rate": 9.189497716894979e-05,
"loss": 0.4454,
"step": 1610
},
{
"epoch": 0.46236175526221907,
"grad_norm": 0.7254750728607178,
"learning_rate": 9.246575342465755e-05,
"loss": 0.4204,
"step": 1620
},
{
"epoch": 0.4652158401712451,
"grad_norm": 0.5650269985198975,
"learning_rate": 9.30365296803653e-05,
"loss": 0.4431,
"step": 1630
},
{
"epoch": 0.4680699250802711,
"grad_norm": 0.9648821353912354,
"learning_rate": 9.360730593607307e-05,
"loss": 0.4389,
"step": 1640
},
{
"epoch": 0.4709240099892972,
"grad_norm": 0.6625390648841858,
"learning_rate": 9.417808219178083e-05,
"loss": 0.4491,
"step": 1650
},
{
"epoch": 0.47377809489832323,
"grad_norm": 0.8872827291488647,
"learning_rate": 9.474885844748859e-05,
"loss": 0.443,
"step": 1660
},
{
"epoch": 0.4766321798073493,
"grad_norm": 0.6523913741111755,
"learning_rate": 9.531963470319635e-05,
"loss": 0.421,
"step": 1670
},
{
"epoch": 0.4794862647163753,
"grad_norm": 0.48721668124198914,
"learning_rate": 9.58904109589041e-05,
"loss": 0.4762,
"step": 1680
},
{
"epoch": 0.48234034962540134,
"grad_norm": 0.808788537979126,
"learning_rate": 9.646118721461188e-05,
"loss": 0.4383,
"step": 1690
},
{
"epoch": 0.4851944345344274,
"grad_norm": 1.0816537141799927,
"learning_rate": 9.703196347031964e-05,
"loss": 0.4273,
"step": 1700
},
{
"epoch": 0.48804851944345345,
"grad_norm": 0.9300740361213684,
"learning_rate": 9.76027397260274e-05,
"loss": 0.4383,
"step": 1710
},
{
"epoch": 0.4909026043524795,
"grad_norm": 0.6394598484039307,
"learning_rate": 9.817351598173516e-05,
"loss": 0.4864,
"step": 1720
},
{
"epoch": 0.4937566892615055,
"grad_norm": 0.6434561014175415,
"learning_rate": 9.874429223744292e-05,
"loss": 0.4815,
"step": 1730
},
{
"epoch": 0.49661077417053157,
"grad_norm": 0.7413051128387451,
"learning_rate": 9.931506849315069e-05,
"loss": 0.465,
"step": 1740
},
{
"epoch": 0.4994648590795576,
"grad_norm": 0.6108921766281128,
"learning_rate": 9.988584474885845e-05,
"loss": 0.4645,
"step": 1750
},
{
"epoch": 0.5023189439885837,
"grad_norm": 0.6062872409820557,
"learning_rate": 9.999993644614703e-05,
"loss": 0.4244,
"step": 1760
},
{
"epoch": 0.5051730288976097,
"grad_norm": 0.7179297208786011,
"learning_rate": 9.999967825889622e-05,
"loss": 0.437,
"step": 1770
},
{
"epoch": 0.5080271138066358,
"grad_norm": 0.7407391667366028,
"learning_rate": 9.999922146715655e-05,
"loss": 0.4506,
"step": 1780
},
{
"epoch": 0.5108811987156618,
"grad_norm": 0.38226214051246643,
"learning_rate": 9.999856607274242e-05,
"loss": 0.3888,
"step": 1790
},
{
"epoch": 0.5137352836246878,
"grad_norm": 0.8683974146842957,
"learning_rate": 9.999771207825713e-05,
"loss": 0.445,
"step": 1800
},
{
"epoch": 0.5165893685337138,
"grad_norm": 0.6052406430244446,
"learning_rate": 9.999665948709287e-05,
"loss": 0.393,
"step": 1810
},
{
"epoch": 0.5194434534427399,
"grad_norm": 0.9383887052536011,
"learning_rate": 9.999540830343064e-05,
"loss": 0.4655,
"step": 1820
},
{
"epoch": 0.522297538351766,
"grad_norm": 0.6511524319648743,
"learning_rate": 9.999395853224028e-05,
"loss": 0.4633,
"step": 1830
},
{
"epoch": 0.525151623260792,
"grad_norm": 0.3971940875053406,
"learning_rate": 9.999231017928045e-05,
"loss": 0.408,
"step": 1840
},
{
"epoch": 0.5280057081698181,
"grad_norm": 0.6968293786048889,
"learning_rate": 9.99904632510986e-05,
"loss": 0.4559,
"step": 1850
},
{
"epoch": 0.5308597930788441,
"grad_norm": 0.47270509600639343,
"learning_rate": 9.998841775503096e-05,
"loss": 0.4257,
"step": 1860
},
{
"epoch": 0.5337138779878702,
"grad_norm": 0.7707135081291199,
"learning_rate": 9.998617369920244e-05,
"loss": 0.4326,
"step": 1870
},
{
"epoch": 0.5365679628968962,
"grad_norm": 0.720313549041748,
"learning_rate": 9.998373109252672e-05,
"loss": 0.4257,
"step": 1880
},
{
"epoch": 0.5394220478059222,
"grad_norm": 0.9472813606262207,
"learning_rate": 9.998108994470612e-05,
"loss": 0.4477,
"step": 1890
},
{
"epoch": 0.5422761327149482,
"grad_norm": 0.5608553290367126,
"learning_rate": 9.997825026623155e-05,
"loss": 0.4542,
"step": 1900
},
{
"epoch": 0.5451302176239743,
"grad_norm": 1.084322452545166,
"learning_rate": 9.997521206838255e-05,
"loss": 0.4208,
"step": 1910
},
{
"epoch": 0.5479843025330003,
"grad_norm": 0.6105670928955078,
"learning_rate": 9.997197536322722e-05,
"loss": 0.4282,
"step": 1920
},
{
"epoch": 0.5508383874420264,
"grad_norm": 0.5767210721969604,
"learning_rate": 9.99685401636221e-05,
"loss": 0.455,
"step": 1930
},
{
"epoch": 0.5536924723510525,
"grad_norm": 0.5981484055519104,
"learning_rate": 9.996490648321219e-05,
"loss": 0.4424,
"step": 1940
},
{
"epoch": 0.5565465572600785,
"grad_norm": 0.7510049939155579,
"learning_rate": 9.996107433643092e-05,
"loss": 0.4533,
"step": 1950
},
{
"epoch": 0.5594006421691046,
"grad_norm": 0.9091824889183044,
"learning_rate": 9.99570437385e-05,
"loss": 0.4232,
"step": 1960
},
{
"epoch": 0.5622547270781306,
"grad_norm": 0.5205058455467224,
"learning_rate": 9.995281470542943e-05,
"loss": 0.4501,
"step": 1970
},
{
"epoch": 0.5651088119871566,
"grad_norm": 0.5514882206916809,
"learning_rate": 9.994838725401743e-05,
"loss": 0.4258,
"step": 1980
},
{
"epoch": 0.5679628968961826,
"grad_norm": 0.6759376525878906,
"learning_rate": 9.994376140185032e-05,
"loss": 0.4764,
"step": 1990
},
{
"epoch": 0.5708169818052087,
"grad_norm": 0.5412331223487854,
"learning_rate": 9.993893716730255e-05,
"loss": 0.4606,
"step": 2000
},
{
"epoch": 0.5736710667142347,
"grad_norm": 0.7248188257217407,
"learning_rate": 9.993391456953651e-05,
"loss": 0.4417,
"step": 2010
},
{
"epoch": 0.5765251516232608,
"grad_norm": 0.6278032660484314,
"learning_rate": 9.992869362850256e-05,
"loss": 0.4354,
"step": 2020
},
{
"epoch": 0.5793792365322868,
"grad_norm": 0.7813991904258728,
"learning_rate": 9.992327436493886e-05,
"loss": 0.4386,
"step": 2030
},
{
"epoch": 0.5822333214413129,
"grad_norm": 0.6477668881416321,
"learning_rate": 9.991765680037137e-05,
"loss": 0.3989,
"step": 2040
},
{
"epoch": 0.585087406350339,
"grad_norm": 0.6790643930435181,
"learning_rate": 9.991184095711368e-05,
"loss": 0.4251,
"step": 2050
},
{
"epoch": 0.587941491259365,
"grad_norm": 0.6338703632354736,
"learning_rate": 9.990582685826701e-05,
"loss": 0.4251,
"step": 2060
},
{
"epoch": 0.590795576168391,
"grad_norm": 0.5298501253128052,
"learning_rate": 9.989961452772005e-05,
"loss": 0.4457,
"step": 2070
},
{
"epoch": 0.593649661077417,
"grad_norm": 0.547738790512085,
"learning_rate": 9.98932039901489e-05,
"loss": 0.447,
"step": 2080
},
{
"epoch": 0.5965037459864431,
"grad_norm": 0.6610301733016968,
"learning_rate": 9.988659527101693e-05,
"loss": 0.43,
"step": 2090
},
{
"epoch": 0.5993578308954691,
"grad_norm": 0.5846606492996216,
"learning_rate": 9.987978839657475e-05,
"loss": 0.4594,
"step": 2100
},
{
"epoch": 0.6022119158044952,
"grad_norm": 0.5017244815826416,
"learning_rate": 9.987278339386006e-05,
"loss": 0.4136,
"step": 2110
},
{
"epoch": 0.6050660007135212,
"grad_norm": 0.6239372491836548,
"learning_rate": 9.986558029069753e-05,
"loss": 0.4765,
"step": 2120
},
{
"epoch": 0.6079200856225473,
"grad_norm": 0.4270966053009033,
"learning_rate": 9.98581791156987e-05,
"loss": 0.421,
"step": 2130
},
{
"epoch": 0.6107741705315733,
"grad_norm": 0.9911853075027466,
"learning_rate": 9.985057989826193e-05,
"loss": 0.4398,
"step": 2140
},
{
"epoch": 0.6136282554405994,
"grad_norm": 0.5237308740615845,
"learning_rate": 9.984278266857215e-05,
"loss": 0.4036,
"step": 2150
},
{
"epoch": 0.6164823403496253,
"grad_norm": 0.7680717706680298,
"learning_rate": 9.983478745760088e-05,
"loss": 0.4579,
"step": 2160
},
{
"epoch": 0.6193364252586514,
"grad_norm": 0.6343058347702026,
"learning_rate": 9.982659429710601e-05,
"loss": 0.4119,
"step": 2170
},
{
"epoch": 0.6221905101676775,
"grad_norm": 0.4680488109588623,
"learning_rate": 9.981820321963171e-05,
"loss": 0.4047,
"step": 2180
},
{
"epoch": 0.6250445950767035,
"grad_norm": 0.4934472143650055,
"learning_rate": 9.980961425850832e-05,
"loss": 0.4377,
"step": 2190
},
{
"epoch": 0.6278986799857296,
"grad_norm": 0.6324202418327332,
"learning_rate": 9.980082744785219e-05,
"loss": 0.4417,
"step": 2200
},
{
"epoch": 0.6307527648947556,
"grad_norm": 0.5263383984565735,
"learning_rate": 9.979184282256553e-05,
"loss": 0.4166,
"step": 2210
},
{
"epoch": 0.6336068498037817,
"grad_norm": 1.1357102394104004,
"learning_rate": 9.978266041833631e-05,
"loss": 0.4536,
"step": 2220
},
{
"epoch": 0.6364609347128077,
"grad_norm": 1.1446198225021362,
"learning_rate": 9.977328027163811e-05,
"loss": 0.4449,
"step": 2230
},
{
"epoch": 0.6393150196218338,
"grad_norm": 0.4317645728588104,
"learning_rate": 9.976370241972995e-05,
"loss": 0.4161,
"step": 2240
},
{
"epoch": 0.6421691045308598,
"grad_norm": 0.6288971304893494,
"learning_rate": 9.975392690065616e-05,
"loss": 0.4469,
"step": 2250
},
{
"epoch": 0.6450231894398858,
"grad_norm": 0.5151458978652954,
"learning_rate": 9.974395375324622e-05,
"loss": 0.4478,
"step": 2260
},
{
"epoch": 0.6478772743489118,
"grad_norm": 0.6855816841125488,
"learning_rate": 9.973378301711465e-05,
"loss": 0.437,
"step": 2270
},
{
"epoch": 0.6507313592579379,
"grad_norm": 0.5615636110305786,
"learning_rate": 9.972341473266074e-05,
"loss": 0.4475,
"step": 2280
},
{
"epoch": 0.653585444166964,
"grad_norm": 0.758758544921875,
"learning_rate": 9.971284894106856e-05,
"loss": 0.4066,
"step": 2290
},
{
"epoch": 0.65643952907599,
"grad_norm": 0.5864000916481018,
"learning_rate": 9.970208568430662e-05,
"loss": 0.4327,
"step": 2300
},
{
"epoch": 0.6592936139850161,
"grad_norm": 0.5502752065658569,
"learning_rate": 9.969112500512784e-05,
"loss": 0.4266,
"step": 2310
},
{
"epoch": 0.6621476988940421,
"grad_norm": 0.9890068769454956,
"learning_rate": 9.96799669470693e-05,
"loss": 0.4279,
"step": 2320
},
{
"epoch": 0.6650017838030682,
"grad_norm": 0.6124778389930725,
"learning_rate": 9.966861155445207e-05,
"loss": 0.4138,
"step": 2330
},
{
"epoch": 0.6678558687120942,
"grad_norm": 0.6781124472618103,
"learning_rate": 9.965705887238109e-05,
"loss": 0.4264,
"step": 2340
},
{
"epoch": 0.6707099536211202,
"grad_norm": 0.5691377520561218,
"learning_rate": 9.964530894674495e-05,
"loss": 0.4407,
"step": 2350
},
{
"epoch": 0.6735640385301462,
"grad_norm": 0.5900623202323914,
"learning_rate": 9.963336182421572e-05,
"loss": 0.4134,
"step": 2360
},
{
"epoch": 0.6764181234391723,
"grad_norm": 0.9385659694671631,
"learning_rate": 9.962121755224874e-05,
"loss": 0.4488,
"step": 2370
},
{
"epoch": 0.6792722083481983,
"grad_norm": 0.5161449909210205,
"learning_rate": 9.960887617908245e-05,
"loss": 0.4443,
"step": 2380
},
{
"epoch": 0.6821262932572244,
"grad_norm": 0.636301577091217,
"learning_rate": 9.959633775373823e-05,
"loss": 0.4416,
"step": 2390
},
{
"epoch": 0.6849803781662505,
"grad_norm": 0.666099488735199,
"learning_rate": 9.958360232602013e-05,
"loss": 0.4629,
"step": 2400
},
{
"epoch": 0.6878344630752765,
"grad_norm": 0.8457536697387695,
"learning_rate": 9.957066994651474e-05,
"loss": 0.4516,
"step": 2410
},
{
"epoch": 0.6906885479843026,
"grad_norm": 0.753018319606781,
"learning_rate": 9.955754066659096e-05,
"loss": 0.4199,
"step": 2420
},
{
"epoch": 0.6935426328933286,
"grad_norm": 0.6624453663825989,
"learning_rate": 9.954421453839983e-05,
"loss": 0.4301,
"step": 2430
},
{
"epoch": 0.6963967178023546,
"grad_norm": 0.5142033696174622,
"learning_rate": 9.953069161487422e-05,
"loss": 0.445,
"step": 2440
},
{
"epoch": 0.6992508027113806,
"grad_norm": 0.6645984053611755,
"learning_rate": 9.95169719497288e-05,
"loss": 0.4784,
"step": 2450
},
{
"epoch": 0.7021048876204067,
"grad_norm": 0.767155647277832,
"learning_rate": 9.950305559745963e-05,
"loss": 0.4029,
"step": 2460
},
{
"epoch": 0.7049589725294327,
"grad_norm": 0.7413857579231262,
"learning_rate": 9.948894261334408e-05,
"loss": 0.4558,
"step": 2470
},
{
"epoch": 0.7078130574384588,
"grad_norm": 0.6691272854804993,
"learning_rate": 9.947463305344051e-05,
"loss": 0.4196,
"step": 2480
},
{
"epoch": 0.7106671423474848,
"grad_norm": 0.7997813820838928,
"learning_rate": 9.946012697458819e-05,
"loss": 0.4381,
"step": 2490
},
{
"epoch": 0.7135212272565109,
"grad_norm": 1.3109943866729736,
"learning_rate": 9.944542443440693e-05,
"loss": 0.4342,
"step": 2500
},
{
"epoch": 0.716375312165537,
"grad_norm": 0.6375836730003357,
"learning_rate": 9.94305254912969e-05,
"loss": 0.4314,
"step": 2510
},
{
"epoch": 0.719229397074563,
"grad_norm": 0.5794687271118164,
"learning_rate": 9.941543020443843e-05,
"loss": 0.444,
"step": 2520
},
{
"epoch": 0.722083481983589,
"grad_norm": 0.5072224736213684,
"learning_rate": 9.940013863379173e-05,
"loss": 0.4442,
"step": 2530
},
{
"epoch": 0.724937566892615,
"grad_norm": 0.6260116696357727,
"learning_rate": 9.93846508400967e-05,
"loss": 0.4761,
"step": 2540
},
{
"epoch": 0.7277916518016411,
"grad_norm": 0.5651645660400391,
"learning_rate": 9.936896688487262e-05,
"loss": 0.48,
"step": 2550
},
{
"epoch": 0.7306457367106671,
"grad_norm": 1.0688337087631226,
"learning_rate": 9.9353086830418e-05,
"loss": 0.4256,
"step": 2560
},
{
"epoch": 0.7334998216196932,
"grad_norm": 0.7255706191062927,
"learning_rate": 9.933701073981023e-05,
"loss": 0.4464,
"step": 2570
},
{
"epoch": 0.7363539065287192,
"grad_norm": 0.5594896078109741,
"learning_rate": 9.932073867690539e-05,
"loss": 0.4456,
"step": 2580
},
{
"epoch": 0.7392079914377453,
"grad_norm": 0.5755929946899414,
"learning_rate": 9.930427070633798e-05,
"loss": 0.4013,
"step": 2590
},
{
"epoch": 0.7420620763467713,
"grad_norm": 0.8663392066955566,
"learning_rate": 9.928760689352072e-05,
"loss": 0.4306,
"step": 2600
},
{
"epoch": 0.7449161612557974,
"grad_norm": 0.6546819806098938,
"learning_rate": 9.927074730464417e-05,
"loss": 0.4352,
"step": 2610
},
{
"epoch": 0.7477702461648233,
"grad_norm": 0.6611933708190918,
"learning_rate": 9.925369200667655e-05,
"loss": 0.4681,
"step": 2620
},
{
"epoch": 0.7506243310738494,
"grad_norm": 0.9629554152488708,
"learning_rate": 9.923644106736348e-05,
"loss": 0.4626,
"step": 2630
},
{
"epoch": 0.7534784159828755,
"grad_norm": 0.6556516289710999,
"learning_rate": 9.92189945552277e-05,
"loss": 0.4009,
"step": 2640
},
{
"epoch": 0.7563325008919015,
"grad_norm": 0.5907385945320129,
"learning_rate": 9.920135253956875e-05,
"loss": 0.4303,
"step": 2650
},
{
"epoch": 0.7591865858009276,
"grad_norm": 0.5351846218109131,
"learning_rate": 9.918351509046276e-05,
"loss": 0.4192,
"step": 2660
},
{
"epoch": 0.7620406707099536,
"grad_norm": 0.8660167455673218,
"learning_rate": 9.91654822787621e-05,
"loss": 0.4525,
"step": 2670
},
{
"epoch": 0.7648947556189797,
"grad_norm": 0.5204272270202637,
"learning_rate": 9.914725417609523e-05,
"loss": 0.3889,
"step": 2680
},
{
"epoch": 0.7677488405280057,
"grad_norm": 0.8953636884689331,
"learning_rate": 9.912883085486626e-05,
"loss": 0.4623,
"step": 2690
},
{
"epoch": 0.7706029254370318,
"grad_norm": 0.6904731392860413,
"learning_rate": 9.911021238825473e-05,
"loss": 0.4251,
"step": 2700
},
{
"epoch": 0.7734570103460578,
"grad_norm": 0.4080071449279785,
"learning_rate": 9.909139885021535e-05,
"loss": 0.4477,
"step": 2710
},
{
"epoch": 0.7763110952550838,
"grad_norm": 0.6172974705696106,
"learning_rate": 9.907239031547765e-05,
"loss": 0.4164,
"step": 2720
},
{
"epoch": 0.7791651801641098,
"grad_norm": 0.4886259436607361,
"learning_rate": 9.905318685954574e-05,
"loss": 0.421,
"step": 2730
},
{
"epoch": 0.7820192650731359,
"grad_norm": 1.2826932668685913,
"learning_rate": 9.903378855869797e-05,
"loss": 0.4093,
"step": 2740
},
{
"epoch": 0.784873349982162,
"grad_norm": 0.7179902195930481,
"learning_rate": 9.901419548998658e-05,
"loss": 0.4022,
"step": 2750
},
{
"epoch": 0.787727434891188,
"grad_norm": 0.559853732585907,
"learning_rate": 9.899440773123756e-05,
"loss": 0.4364,
"step": 2760
},
{
"epoch": 0.7905815198002141,
"grad_norm": 1.151147723197937,
"learning_rate": 9.897442536105013e-05,
"loss": 0.4179,
"step": 2770
},
{
"epoch": 0.7934356047092401,
"grad_norm": 0.7972052097320557,
"learning_rate": 9.895424845879657e-05,
"loss": 0.4534,
"step": 2780
},
{
"epoch": 0.7962896896182662,
"grad_norm": 1.4866856336593628,
"learning_rate": 9.893387710462189e-05,
"loss": 0.4501,
"step": 2790
},
{
"epoch": 0.7991437745272922,
"grad_norm": 0.6486208438873291,
"learning_rate": 9.891331137944344e-05,
"loss": 0.4327,
"step": 2800
},
{
"epoch": 0.8019978594363182,
"grad_norm": 0.4823531210422516,
"learning_rate": 9.889255136495063e-05,
"loss": 0.4188,
"step": 2810
},
{
"epoch": 0.8048519443453442,
"grad_norm": 0.4850338399410248,
"learning_rate": 9.887159714360469e-05,
"loss": 0.4096,
"step": 2820
},
{
"epoch": 0.8077060292543703,
"grad_norm": 0.6615185737609863,
"learning_rate": 9.885044879863816e-05,
"loss": 0.4626,
"step": 2830
},
{
"epoch": 0.8105601141633964,
"grad_norm": 0.639529824256897,
"learning_rate": 9.88291064140547e-05,
"loss": 0.4125,
"step": 2840
},
{
"epoch": 0.8134141990724224,
"grad_norm": 0.6544278860092163,
"learning_rate": 9.880757007462876e-05,
"loss": 0.4319,
"step": 2850
},
{
"epoch": 0.8162682839814485,
"grad_norm": 0.8010094165802002,
"learning_rate": 9.878583986590513e-05,
"loss": 0.4206,
"step": 2860
},
{
"epoch": 0.8191223688904745,
"grad_norm": 0.9454021453857422,
"learning_rate": 9.876391587419871e-05,
"loss": 0.4327,
"step": 2870
},
{
"epoch": 0.8219764537995006,
"grad_norm": 0.5547480583190918,
"learning_rate": 9.874179818659415e-05,
"loss": 0.448,
"step": 2880
},
{
"epoch": 0.8248305387085266,
"grad_norm": 0.526059091091156,
"learning_rate": 9.871948689094542e-05,
"loss": 0.413,
"step": 2890
},
{
"epoch": 0.8276846236175526,
"grad_norm": 0.5893587470054626,
"learning_rate": 9.869698207587558e-05,
"loss": 0.4044,
"step": 2900
},
{
"epoch": 0.8305387085265786,
"grad_norm": 0.5328799486160278,
"learning_rate": 9.867428383077637e-05,
"loss": 0.4255,
"step": 2910
},
{
"epoch": 0.8333927934356047,
"grad_norm": 0.4376719295978546,
"learning_rate": 9.865139224580779e-05,
"loss": 0.3751,
"step": 2920
},
{
"epoch": 0.8362468783446307,
"grad_norm": 0.9210516214370728,
"learning_rate": 9.86283074118979e-05,
"loss": 0.4657,
"step": 2930
},
{
"epoch": 0.8391009632536568,
"grad_norm": 0.5722871422767639,
"learning_rate": 9.86050294207423e-05,
"loss": 0.435,
"step": 2940
},
{
"epoch": 0.8419550481626829,
"grad_norm": 0.6614554524421692,
"learning_rate": 9.858155836480387e-05,
"loss": 0.4466,
"step": 2950
},
{
"epoch": 0.8448091330717089,
"grad_norm": 0.8073657751083374,
"learning_rate": 9.855789433731234e-05,
"loss": 0.405,
"step": 2960
},
{
"epoch": 0.847663217980735,
"grad_norm": 0.5362779498100281,
"learning_rate": 9.853403743226393e-05,
"loss": 0.4344,
"step": 2970
},
{
"epoch": 0.850517302889761,
"grad_norm": 0.6456807851791382,
"learning_rate": 9.850998774442106e-05,
"loss": 0.4305,
"step": 2980
},
{
"epoch": 0.853371387798787,
"grad_norm": 0.5409958958625793,
"learning_rate": 9.848574536931185e-05,
"loss": 0.4312,
"step": 2990
},
{
"epoch": 0.856225472707813,
"grad_norm": 0.5728853940963745,
"learning_rate": 9.846131040322983e-05,
"loss": 0.4364,
"step": 3000
},
{
"epoch": 0.8590795576168391,
"grad_norm": 0.5256248116493225,
"learning_rate": 9.843668294323348e-05,
"loss": 0.4201,
"step": 3010
},
{
"epoch": 0.8619336425258651,
"grad_norm": 0.8561432957649231,
"learning_rate": 9.841186308714589e-05,
"loss": 0.4336,
"step": 3020
},
{
"epoch": 0.8647877274348912,
"grad_norm": 0.7229063510894775,
"learning_rate": 9.838685093355444e-05,
"loss": 0.407,
"step": 3030
},
{
"epoch": 0.8676418123439172,
"grad_norm": 0.8342879414558411,
"learning_rate": 9.836164658181026e-05,
"loss": 0.4489,
"step": 3040
},
{
"epoch": 0.8704958972529433,
"grad_norm": 0.5657804608345032,
"learning_rate": 9.833625013202794e-05,
"loss": 0.4169,
"step": 3050
},
{
"epoch": 0.8733499821619694,
"grad_norm": 0.5953682065010071,
"learning_rate": 9.831066168508514e-05,
"loss": 0.408,
"step": 3060
},
{
"epoch": 0.8762040670709954,
"grad_norm": 0.456233948469162,
"learning_rate": 9.828488134262208e-05,
"loss": 0.4163,
"step": 3070
},
{
"epoch": 0.8790581519800214,
"grad_norm": 0.7670127749443054,
"learning_rate": 9.825890920704126e-05,
"loss": 0.4373,
"step": 3080
},
{
"epoch": 0.8819122368890474,
"grad_norm": 0.9011564254760742,
"learning_rate": 9.823274538150702e-05,
"loss": 0.3886,
"step": 3090
},
{
"epoch": 0.8847663217980735,
"grad_norm": 0.5500415563583374,
"learning_rate": 9.820638996994509e-05,
"loss": 0.4382,
"step": 3100
},
{
"epoch": 0.8876204067070995,
"grad_norm": 0.6603699922561646,
"learning_rate": 9.81798430770422e-05,
"loss": 0.4768,
"step": 3110
},
{
"epoch": 0.8904744916161256,
"grad_norm": 0.43730512261390686,
"learning_rate": 9.815310480824564e-05,
"loss": 0.3954,
"step": 3120
},
{
"epoch": 0.8933285765251516,
"grad_norm": 0.5685926675796509,
"learning_rate": 9.812617526976295e-05,
"loss": 0.4326,
"step": 3130
},
{
"epoch": 0.8961826614341777,
"grad_norm": 0.4876101016998291,
"learning_rate": 9.809905456856135e-05,
"loss": 0.4197,
"step": 3140
},
{
"epoch": 0.8990367463432037,
"grad_norm": 0.6296002864837646,
"learning_rate": 9.807174281236735e-05,
"loss": 0.4574,
"step": 3150
},
{
"epoch": 0.9018908312522298,
"grad_norm": 0.5232740640640259,
"learning_rate": 9.804424010966644e-05,
"loss": 0.4251,
"step": 3160
},
{
"epoch": 0.9047449161612559,
"grad_norm": 0.42207297682762146,
"learning_rate": 9.801654656970252e-05,
"loss": 0.3989,
"step": 3170
},
{
"epoch": 0.9075990010702818,
"grad_norm": 0.6646615266799927,
"learning_rate": 9.798866230247752e-05,
"loss": 0.4171,
"step": 3180
},
{
"epoch": 0.9104530859793079,
"grad_norm": 0.6919196248054504,
"learning_rate": 9.796058741875095e-05,
"loss": 0.4811,
"step": 3190
},
{
"epoch": 0.9133071708883339,
"grad_norm": 0.5413965582847595,
"learning_rate": 9.79323220300395e-05,
"loss": 0.4665,
"step": 3200
},
{
"epoch": 0.91616125579736,
"grad_norm": 0.8154647946357727,
"learning_rate": 9.790386624861657e-05,
"loss": 0.4236,
"step": 3210
},
{
"epoch": 0.919015340706386,
"grad_norm": 0.48265111446380615,
"learning_rate": 9.787522018751179e-05,
"loss": 0.419,
"step": 3220
},
{
"epoch": 0.9218694256154121,
"grad_norm": 0.7374817728996277,
"learning_rate": 9.78463839605106e-05,
"loss": 0.4372,
"step": 3230
},
{
"epoch": 0.9247235105244381,
"grad_norm": 0.7971628904342651,
"learning_rate": 9.781735768215385e-05,
"loss": 0.4593,
"step": 3240
},
{
"epoch": 0.9275775954334642,
"grad_norm": 0.49169448018074036,
"learning_rate": 9.778814146773727e-05,
"loss": 0.4407,
"step": 3250
},
{
"epoch": 0.9304316803424902,
"grad_norm": 0.8247508406639099,
"learning_rate": 9.7758735433311e-05,
"loss": 0.3936,
"step": 3260
},
{
"epoch": 0.9332857652515162,
"grad_norm": 0.4885646402835846,
"learning_rate": 9.772913969567923e-05,
"loss": 0.4159,
"step": 3270
},
{
"epoch": 0.9361398501605422,
"grad_norm": 0.5817092657089233,
"learning_rate": 9.76993543723996e-05,
"loss": 0.4822,
"step": 3280
},
{
"epoch": 0.9389939350695683,
"grad_norm": 0.5657879114151001,
"learning_rate": 9.766937958178293e-05,
"loss": 0.4002,
"step": 3290
},
{
"epoch": 0.9418480199785944,
"grad_norm": 0.6319302916526794,
"learning_rate": 9.763921544289245e-05,
"loss": 0.4502,
"step": 3300
},
{
"epoch": 0.9447021048876204,
"grad_norm": 0.7825123071670532,
"learning_rate": 9.760886207554367e-05,
"loss": 0.426,
"step": 3310
},
{
"epoch": 0.9475561897966465,
"grad_norm": 0.6912452578544617,
"learning_rate": 9.757831960030364e-05,
"loss": 0.4202,
"step": 3320
},
{
"epoch": 0.9504102747056725,
"grad_norm": 0.6349189877510071,
"learning_rate": 9.754758813849059e-05,
"loss": 0.4484,
"step": 3330
},
{
"epoch": 0.9532643596146986,
"grad_norm": 0.5008338689804077,
"learning_rate": 9.751666781217343e-05,
"loss": 0.4398,
"step": 3340
},
{
"epoch": 0.9561184445237246,
"grad_norm": 0.8321357369422913,
"learning_rate": 9.748555874417128e-05,
"loss": 0.42,
"step": 3350
},
{
"epoch": 0.9589725294327506,
"grad_norm": 0.454465389251709,
"learning_rate": 9.745426105805293e-05,
"loss": 0.3906,
"step": 3360
},
{
"epoch": 0.9618266143417766,
"grad_norm": 0.48528704047203064,
"learning_rate": 9.74227748781364e-05,
"loss": 0.4359,
"step": 3370
},
{
"epoch": 0.9646806992508027,
"grad_norm": 0.47498252987861633,
"learning_rate": 9.739110032948844e-05,
"loss": 0.3875,
"step": 3380
},
{
"epoch": 0.9675347841598287,
"grad_norm": 0.5873503088951111,
"learning_rate": 9.7359237537924e-05,
"loss": 0.4297,
"step": 3390
},
{
"epoch": 0.9703888690688548,
"grad_norm": 0.47873497009277344,
"learning_rate": 9.732718663000577e-05,
"loss": 0.4172,
"step": 3400
},
{
"epoch": 0.9732429539778809,
"grad_norm": 0.5299973487854004,
"learning_rate": 9.729494773304364e-05,
"loss": 0.4078,
"step": 3410
},
{
"epoch": 0.9760970388869069,
"grad_norm": 0.5915161967277527,
"learning_rate": 9.726252097509423e-05,
"loss": 0.4236,
"step": 3420
},
{
"epoch": 0.978951123795933,
"grad_norm": 0.87859046459198,
"learning_rate": 9.722990648496038e-05,
"loss": 0.4555,
"step": 3430
},
{
"epoch": 0.981805208704959,
"grad_norm": 0.6019312739372253,
"learning_rate": 9.719710439219058e-05,
"loss": 0.4058,
"step": 3440
},
{
"epoch": 0.984659293613985,
"grad_norm": 0.8020815253257751,
"learning_rate": 9.716411482707857e-05,
"loss": 0.4319,
"step": 3450
},
{
"epoch": 0.987513378523011,
"grad_norm": 0.7268204689025879,
"learning_rate": 9.713093792066266e-05,
"loss": 0.4416,
"step": 3460
},
{
"epoch": 0.9903674634320371,
"grad_norm": 0.6523804664611816,
"learning_rate": 9.709757380472536e-05,
"loss": 0.4043,
"step": 3470
},
{
"epoch": 0.9932215483410631,
"grad_norm": 0.6375195980072021,
"learning_rate": 9.706402261179279e-05,
"loss": 0.3974,
"step": 3480
},
{
"epoch": 0.9960756332500892,
"grad_norm": 0.9598755836486816,
"learning_rate": 9.703028447513417e-05,
"loss": 0.4353,
"step": 3490
},
{
"epoch": 0.9989297181591152,
"grad_norm": 0.4769824147224426,
"learning_rate": 9.699635952876127e-05,
"loss": 0.4413,
"step": 3500
},
{
"epoch": 1.0017838030681412,
"grad_norm": 0.6290175318717957,
"learning_rate": 9.696224790742787e-05,
"loss": 0.3933,
"step": 3510
},
{
"epoch": 1.0046378879771674,
"grad_norm": 0.6167151927947998,
"learning_rate": 9.692794974662927e-05,
"loss": 0.4242,
"step": 3520
},
{
"epoch": 1.0074919728861933,
"grad_norm": 0.44450104236602783,
"learning_rate": 9.689346518260174e-05,
"loss": 0.3619,
"step": 3530
},
{
"epoch": 1.0103460577952195,
"grad_norm": 0.9434749484062195,
"learning_rate": 9.685879435232193e-05,
"loss": 0.43,
"step": 3540
},
{
"epoch": 1.0132001427042454,
"grad_norm": 0.479621022939682,
"learning_rate": 9.682393739350637e-05,
"loss": 0.4174,
"step": 3550
},
{
"epoch": 1.0160542276132716,
"grad_norm": 0.5258568525314331,
"learning_rate": 9.678889444461093e-05,
"loss": 0.434,
"step": 3560
},
{
"epoch": 1.0189083125222975,
"grad_norm": 0.4461469054222107,
"learning_rate": 9.675366564483026e-05,
"loss": 0.3974,
"step": 3570
},
{
"epoch": 1.0217623974313237,
"grad_norm": 0.38621410727500916,
"learning_rate": 9.671825113409718e-05,
"loss": 0.4231,
"step": 3580
},
{
"epoch": 1.0246164823403496,
"grad_norm": 0.6458470821380615,
"learning_rate": 9.668265105308223e-05,
"loss": 0.4388,
"step": 3590
},
{
"epoch": 1.0274705672493756,
"grad_norm": 0.9359676241874695,
"learning_rate": 9.664686554319306e-05,
"loss": 0.4499,
"step": 3600
},
{
"epoch": 1.0303246521584017,
"grad_norm": 0.5093625783920288,
"learning_rate": 9.661089474657381e-05,
"loss": 0.3919,
"step": 3610
},
{
"epoch": 1.0331787370674277,
"grad_norm": 0.6743724346160889,
"learning_rate": 9.657473880610463e-05,
"loss": 0.4199,
"step": 3620
},
{
"epoch": 1.0360328219764539,
"grad_norm": 0.774372935295105,
"learning_rate": 9.653839786540114e-05,
"loss": 0.3975,
"step": 3630
},
{
"epoch": 1.0388869068854798,
"grad_norm": 0.6048170924186707,
"learning_rate": 9.650187206881368e-05,
"loss": 0.4094,
"step": 3640
},
{
"epoch": 1.041740991794506,
"grad_norm": 0.706270158290863,
"learning_rate": 9.646516156142695e-05,
"loss": 0.4304,
"step": 3650
},
{
"epoch": 1.044595076703532,
"grad_norm": 0.6507980823516846,
"learning_rate": 9.642826648905934e-05,
"loss": 0.4429,
"step": 3660
},
{
"epoch": 1.047449161612558,
"grad_norm": 0.7644017338752747,
"learning_rate": 9.639118699826231e-05,
"loss": 0.4192,
"step": 3670
},
{
"epoch": 1.050303246521584,
"grad_norm": 0.9183304905891418,
"learning_rate": 9.635392323631988e-05,
"loss": 0.4053,
"step": 3680
},
{
"epoch": 1.05315733143061,
"grad_norm": 0.6468967199325562,
"learning_rate": 9.631647535124802e-05,
"loss": 0.3995,
"step": 3690
},
{
"epoch": 1.0560114163396361,
"grad_norm": 1.1233046054840088,
"learning_rate": 9.627884349179402e-05,
"loss": 0.4151,
"step": 3700
},
{
"epoch": 1.058865501248662,
"grad_norm": 0.5151073932647705,
"learning_rate": 9.6241027807436e-05,
"loss": 0.4262,
"step": 3710
},
{
"epoch": 1.0617195861576882,
"grad_norm": 0.8094540238380432,
"learning_rate": 9.62030284483822e-05,
"loss": 0.4258,
"step": 3720
},
{
"epoch": 1.0645736710667142,
"grad_norm": 0.6203951239585876,
"learning_rate": 9.616484556557048e-05,
"loss": 0.4371,
"step": 3730
},
{
"epoch": 1.0674277559757404,
"grad_norm": 0.5027265548706055,
"learning_rate": 9.612647931066763e-05,
"loss": 0.4004,
"step": 3740
},
{
"epoch": 1.0702818408847663,
"grad_norm": 0.4848248362541199,
"learning_rate": 9.608792983606885e-05,
"loss": 0.3917,
"step": 3750
},
{
"epoch": 1.0731359257937925,
"grad_norm": 0.6018698811531067,
"learning_rate": 9.604919729489708e-05,
"loss": 0.3912,
"step": 3760
},
{
"epoch": 1.0759900107028184,
"grad_norm": 0.752939760684967,
"learning_rate": 9.601028184100246e-05,
"loss": 0.432,
"step": 3770
},
{
"epoch": 1.0788440956118444,
"grad_norm": 0.48865851759910583,
"learning_rate": 9.597118362896169e-05,
"loss": 0.416,
"step": 3780
},
{
"epoch": 1.0816981805208705,
"grad_norm": 0.7386459112167358,
"learning_rate": 9.593190281407731e-05,
"loss": 0.4753,
"step": 3790
},
{
"epoch": 1.0845522654298965,
"grad_norm": 0.9361982345581055,
"learning_rate": 9.589243955237731e-05,
"loss": 0.4187,
"step": 3800
},
{
"epoch": 1.0874063503389226,
"grad_norm": 0.6894406080245972,
"learning_rate": 9.585279400061427e-05,
"loss": 0.3709,
"step": 3810
},
{
"epoch": 1.0902604352479486,
"grad_norm": 0.6570184826850891,
"learning_rate": 9.581296631626493e-05,
"loss": 0.37,
"step": 3820
},
{
"epoch": 1.0931145201569747,
"grad_norm": 0.5192276835441589,
"learning_rate": 9.577295665752943e-05,
"loss": 0.3979,
"step": 3830
},
{
"epoch": 1.0959686050660007,
"grad_norm": 0.6113749146461487,
"learning_rate": 9.573276518333074e-05,
"loss": 0.3881,
"step": 3840
},
{
"epoch": 1.0988226899750269,
"grad_norm": 0.8150345683097839,
"learning_rate": 9.569239205331405e-05,
"loss": 0.4079,
"step": 3850
},
{
"epoch": 1.1016767748840528,
"grad_norm": 0.5030243396759033,
"learning_rate": 9.565183742784607e-05,
"loss": 0.3626,
"step": 3860
},
{
"epoch": 1.1045308597930787,
"grad_norm": 0.7989709973335266,
"learning_rate": 9.561110146801447e-05,
"loss": 0.4244,
"step": 3870
},
{
"epoch": 1.107384944702105,
"grad_norm": 0.8960319757461548,
"learning_rate": 9.557018433562716e-05,
"loss": 0.4241,
"step": 3880
},
{
"epoch": 1.1102390296111309,
"grad_norm": 0.7229114770889282,
"learning_rate": 9.552908619321173e-05,
"loss": 0.3973,
"step": 3890
},
{
"epoch": 1.113093114520157,
"grad_norm": 0.45926737785339355,
"learning_rate": 9.548780720401473e-05,
"loss": 0.4087,
"step": 3900
},
{
"epoch": 1.115947199429183,
"grad_norm": 0.616928219795227,
"learning_rate": 9.544634753200107e-05,
"loss": 0.4076,
"step": 3910
},
{
"epoch": 1.1188012843382091,
"grad_norm": 0.8416487574577332,
"learning_rate": 9.540470734185334e-05,
"loss": 0.3991,
"step": 3920
},
{
"epoch": 1.121655369247235,
"grad_norm": 0.5629198551177979,
"learning_rate": 9.536288679897118e-05,
"loss": 0.4197,
"step": 3930
},
{
"epoch": 1.1245094541562612,
"grad_norm": 0.652466356754303,
"learning_rate": 9.532088606947063e-05,
"loss": 0.3913,
"step": 3940
},
{
"epoch": 1.1273635390652872,
"grad_norm": 0.6805722713470459,
"learning_rate": 9.527870532018342e-05,
"loss": 0.419,
"step": 3950
},
{
"epoch": 1.1302176239743131,
"grad_norm": 0.8768517971038818,
"learning_rate": 9.523634471865632e-05,
"loss": 0.4223,
"step": 3960
},
{
"epoch": 1.1330717088833393,
"grad_norm": 0.8079743981361389,
"learning_rate": 9.519380443315058e-05,
"loss": 0.4486,
"step": 3970
},
{
"epoch": 1.1359257937923652,
"grad_norm": 0.6022933125495911,
"learning_rate": 9.515108463264109e-05,
"loss": 0.4504,
"step": 3980
},
{
"epoch": 1.1387798787013914,
"grad_norm": 0.7014849185943604,
"learning_rate": 9.510818548681582e-05,
"loss": 0.4294,
"step": 3990
},
{
"epoch": 1.1416339636104174,
"grad_norm": 0.8471984267234802,
"learning_rate": 9.506510716607513e-05,
"loss": 0.4264,
"step": 4000
},
{
"epoch": 1.1444880485194435,
"grad_norm": 0.8326864838600159,
"learning_rate": 9.502184984153109e-05,
"loss": 0.416,
"step": 4010
},
{
"epoch": 1.1473421334284695,
"grad_norm": 0.614597499370575,
"learning_rate": 9.497841368500675e-05,
"loss": 0.4414,
"step": 4020
},
{
"epoch": 1.1501962183374956,
"grad_norm": 0.8020027875900269,
"learning_rate": 9.493479886903558e-05,
"loss": 0.4162,
"step": 4030
},
{
"epoch": 1.1530503032465216,
"grad_norm": 0.6523504257202148,
"learning_rate": 9.489100556686064e-05,
"loss": 0.4361,
"step": 4040
},
{
"epoch": 1.1559043881555477,
"grad_norm": 0.4413366913795471,
"learning_rate": 9.484703395243396e-05,
"loss": 0.4152,
"step": 4050
},
{
"epoch": 1.1587584730645737,
"grad_norm": 0.8899474740028381,
"learning_rate": 9.480288420041589e-05,
"loss": 0.4572,
"step": 4060
},
{
"epoch": 1.1616125579735996,
"grad_norm": 0.8028743267059326,
"learning_rate": 9.475855648617432e-05,
"loss": 0.4541,
"step": 4070
},
{
"epoch": 1.1644666428826258,
"grad_norm": 0.7049791216850281,
"learning_rate": 9.471405098578405e-05,
"loss": 0.4089,
"step": 4080
},
{
"epoch": 1.1673207277916517,
"grad_norm": 0.7701025009155273,
"learning_rate": 9.466936787602607e-05,
"loss": 0.4132,
"step": 4090
},
{
"epoch": 1.170174812700678,
"grad_norm": 0.48589470982551575,
"learning_rate": 9.462450733438682e-05,
"loss": 0.4109,
"step": 4100
},
{
"epoch": 1.1730288976097039,
"grad_norm": 0.7822790145874023,
"learning_rate": 9.457946953905757e-05,
"loss": 0.4341,
"step": 4110
},
{
"epoch": 1.17588298251873,
"grad_norm": 0.7849439382553101,
"learning_rate": 9.453425466893362e-05,
"loss": 0.4129,
"step": 4120
},
{
"epoch": 1.178737067427756,
"grad_norm": 0.6041362285614014,
"learning_rate": 9.448886290361366e-05,
"loss": 0.4222,
"step": 4130
},
{
"epoch": 1.181591152336782,
"grad_norm": 0.5522482991218567,
"learning_rate": 9.444329442339901e-05,
"loss": 0.3912,
"step": 4140
},
{
"epoch": 1.184445237245808,
"grad_norm": 0.6508781909942627,
"learning_rate": 9.439754940929295e-05,
"loss": 0.4302,
"step": 4150
},
{
"epoch": 1.187299322154834,
"grad_norm": 0.568966805934906,
"learning_rate": 9.435162804299991e-05,
"loss": 0.4466,
"step": 4160
},
{
"epoch": 1.1901534070638602,
"grad_norm": 0.4138081669807434,
"learning_rate": 9.43055305069249e-05,
"loss": 0.3965,
"step": 4170
},
{
"epoch": 1.1930074919728861,
"grad_norm": 0.6189876794815063,
"learning_rate": 9.425925698417262e-05,
"loss": 0.419,
"step": 4180
},
{
"epoch": 1.1958615768819123,
"grad_norm": 0.6760974526405334,
"learning_rate": 9.421280765854685e-05,
"loss": 0.4319,
"step": 4190
},
{
"epoch": 1.1987156617909382,
"grad_norm": 0.7758780717849731,
"learning_rate": 9.416618271454967e-05,
"loss": 0.4266,
"step": 4200
},
{
"epoch": 1.2015697466999644,
"grad_norm": 0.6547653079032898,
"learning_rate": 9.411938233738073e-05,
"loss": 0.4228,
"step": 4210
},
{
"epoch": 1.2044238316089904,
"grad_norm": 1.2232414484024048,
"learning_rate": 9.407240671293654e-05,
"loss": 0.4049,
"step": 4220
},
{
"epoch": 1.2072779165180165,
"grad_norm": 0.5262352228164673,
"learning_rate": 9.402525602780968e-05,
"loss": 0.4029,
"step": 4230
},
{
"epoch": 1.2101320014270425,
"grad_norm": 0.5081900954246521,
"learning_rate": 9.397793046928811e-05,
"loss": 0.4083,
"step": 4240
},
{
"epoch": 1.2129860863360684,
"grad_norm": 0.5323736071586609,
"learning_rate": 9.393043022535442e-05,
"loss": 0.3798,
"step": 4250
},
{
"epoch": 1.2158401712450946,
"grad_norm": 0.6737754940986633,
"learning_rate": 9.388275548468506e-05,
"loss": 0.3894,
"step": 4260
},
{
"epoch": 1.2186942561541205,
"grad_norm": 1.1628127098083496,
"learning_rate": 9.383490643664959e-05,
"loss": 0.4055,
"step": 4270
},
{
"epoch": 1.2215483410631467,
"grad_norm": 0.7762293815612793,
"learning_rate": 9.378688327130992e-05,
"loss": 0.4335,
"step": 4280
},
{
"epoch": 1.2244024259721726,
"grad_norm": 0.7017742991447449,
"learning_rate": 9.373868617941966e-05,
"loss": 0.504,
"step": 4290
},
{
"epoch": 1.2272565108811988,
"grad_norm": 0.7783657312393188,
"learning_rate": 9.369031535242316e-05,
"loss": 0.3922,
"step": 4300
},
{
"epoch": 1.2301105957902247,
"grad_norm": 0.7561664581298828,
"learning_rate": 9.364177098245497e-05,
"loss": 0.4178,
"step": 4310
},
{
"epoch": 1.2329646806992507,
"grad_norm": 0.5814104080200195,
"learning_rate": 9.35930532623389e-05,
"loss": 0.3999,
"step": 4320
},
{
"epoch": 1.2358187656082769,
"grad_norm": 0.8447505235671997,
"learning_rate": 9.354416238558736e-05,
"loss": 0.4427,
"step": 4330
},
{
"epoch": 1.2386728505173028,
"grad_norm": 0.7698172330856323,
"learning_rate": 9.349509854640054e-05,
"loss": 0.422,
"step": 4340
},
{
"epoch": 1.241526935426329,
"grad_norm": 1.1690415143966675,
"learning_rate": 9.344586193966569e-05,
"loss": 0.4099,
"step": 4350
},
{
"epoch": 1.244381020335355,
"grad_norm": 0.8412760496139526,
"learning_rate": 9.339645276095627e-05,
"loss": 0.405,
"step": 4360
},
{
"epoch": 1.247235105244381,
"grad_norm": 0.6715117692947388,
"learning_rate": 9.334687120653127e-05,
"loss": 0.4097,
"step": 4370
},
{
"epoch": 1.250089190153407,
"grad_norm": 1.0793496370315552,
"learning_rate": 9.329711747333431e-05,
"loss": 0.4117,
"step": 4380
},
{
"epoch": 1.252943275062433,
"grad_norm": 0.8151715993881226,
"learning_rate": 9.324719175899297e-05,
"loss": 0.3923,
"step": 4390
},
{
"epoch": 1.2557973599714591,
"grad_norm": 0.5719575881958008,
"learning_rate": 9.319709426181792e-05,
"loss": 0.3947,
"step": 4400
},
{
"epoch": 1.2586514448804853,
"grad_norm": 0.5285683870315552,
"learning_rate": 9.314682518080225e-05,
"loss": 0.4248,
"step": 4410
},
{
"epoch": 1.2615055297895112,
"grad_norm": 0.6833191514015198,
"learning_rate": 9.30963847156205e-05,
"loss": 0.4302,
"step": 4420
},
{
"epoch": 1.2643596146985372,
"grad_norm": 0.7360308766365051,
"learning_rate": 9.304577306662803e-05,
"loss": 0.4655,
"step": 4430
},
{
"epoch": 1.2672136996075634,
"grad_norm": 1.0034043788909912,
"learning_rate": 9.299499043486017e-05,
"loss": 0.4081,
"step": 4440
},
{
"epoch": 1.2700677845165893,
"grad_norm": 0.5989054441452026,
"learning_rate": 9.294403702203135e-05,
"loss": 0.441,
"step": 4450
},
{
"epoch": 1.2729218694256155,
"grad_norm": 0.7070958614349365,
"learning_rate": 9.289291303053443e-05,
"loss": 0.4306,
"step": 4460
},
{
"epoch": 1.2757759543346414,
"grad_norm": 0.9389038681983948,
"learning_rate": 9.284161866343979e-05,
"loss": 0.4448,
"step": 4470
},
{
"epoch": 1.2786300392436676,
"grad_norm": 0.5640192031860352,
"learning_rate": 9.279015412449456e-05,
"loss": 0.4241,
"step": 4480
},
{
"epoch": 1.2814841241526935,
"grad_norm": 0.5926091074943542,
"learning_rate": 9.273851961812186e-05,
"loss": 0.4375,
"step": 4490
},
{
"epoch": 1.2843382090617195,
"grad_norm": 0.4756394922733307,
"learning_rate": 9.268671534941988e-05,
"loss": 0.4207,
"step": 4500
},
{
"epoch": 1.2871922939707456,
"grad_norm": 0.6925960779190063,
"learning_rate": 9.263474152416116e-05,
"loss": 0.4419,
"step": 4510
},
{
"epoch": 1.2900463788797718,
"grad_norm": 0.49510657787323,
"learning_rate": 9.258259834879173e-05,
"loss": 0.3977,
"step": 4520
},
{
"epoch": 1.2929004637887977,
"grad_norm": 0.8859143257141113,
"learning_rate": 9.253028603043028e-05,
"loss": 0.451,
"step": 4530
},
{
"epoch": 1.2957545486978237,
"grad_norm": 0.5511392951011658,
"learning_rate": 9.247780477686738e-05,
"loss": 0.3907,
"step": 4540
},
{
"epoch": 1.2986086336068499,
"grad_norm": 0.7438629865646362,
"learning_rate": 9.242515479656461e-05,
"loss": 0.3776,
"step": 4550
},
{
"epoch": 1.3014627185158758,
"grad_norm": 0.6656787991523743,
"learning_rate": 9.23723362986538e-05,
"loss": 0.3857,
"step": 4560
},
{
"epoch": 1.304316803424902,
"grad_norm": 0.5865601897239685,
"learning_rate": 9.23193494929361e-05,
"loss": 0.4073,
"step": 4570
},
{
"epoch": 1.307170888333928,
"grad_norm": 0.8834959268569946,
"learning_rate": 9.22661945898812e-05,
"loss": 0.4381,
"step": 4580
},
{
"epoch": 1.310024973242954,
"grad_norm": 0.7570575475692749,
"learning_rate": 9.22128718006265e-05,
"loss": 0.4125,
"step": 4590
},
{
"epoch": 1.31287905815198,
"grad_norm": 0.668268084526062,
"learning_rate": 9.21593813369763e-05,
"loss": 0.4132,
"step": 4600
},
{
"epoch": 1.315733143061006,
"grad_norm": 0.9750699400901794,
"learning_rate": 9.210572341140086e-05,
"loss": 0.3826,
"step": 4610
},
{
"epoch": 1.3185872279700321,
"grad_norm": 0.6271708607673645,
"learning_rate": 9.205189823703569e-05,
"loss": 0.4299,
"step": 4620
},
{
"epoch": 1.321441312879058,
"grad_norm": 0.7680832743644714,
"learning_rate": 9.199790602768053e-05,
"loss": 0.4203,
"step": 4630
},
{
"epoch": 1.3242953977880842,
"grad_norm": 0.4561271667480469,
"learning_rate": 9.194374699779871e-05,
"loss": 0.3668,
"step": 4640
},
{
"epoch": 1.3271494826971102,
"grad_norm": 0.7475771903991699,
"learning_rate": 9.188942136251613e-05,
"loss": 0.4024,
"step": 4650
},
{
"epoch": 1.3300035676061364,
"grad_norm": 0.5157244205474854,
"learning_rate": 9.183492933762048e-05,
"loss": 0.4042,
"step": 4660
},
{
"epoch": 1.3328576525151623,
"grad_norm": 0.55656498670578,
"learning_rate": 9.178027113956035e-05,
"loss": 0.4231,
"step": 4670
},
{
"epoch": 1.3357117374241883,
"grad_norm": 0.8296566605567932,
"learning_rate": 9.172544698544445e-05,
"loss": 0.4136,
"step": 4680
},
{
"epoch": 1.3385658223332144,
"grad_norm": 0.8299556970596313,
"learning_rate": 9.16704570930406e-05,
"loss": 0.422,
"step": 4690
},
{
"epoch": 1.3414199072422406,
"grad_norm": 0.804410994052887,
"learning_rate": 9.161530168077503e-05,
"loss": 0.3665,
"step": 4700
},
{
"epoch": 1.3442739921512665,
"grad_norm": 0.5750541090965271,
"learning_rate": 9.155998096773137e-05,
"loss": 0.4127,
"step": 4710
},
{
"epoch": 1.3471280770602925,
"grad_norm": 0.5164769887924194,
"learning_rate": 9.150449517364988e-05,
"loss": 0.4048,
"step": 4720
},
{
"epoch": 1.3499821619693186,
"grad_norm": 0.7072139382362366,
"learning_rate": 9.144884451892655e-05,
"loss": 0.4315,
"step": 4730
},
{
"epoch": 1.3528362468783446,
"grad_norm": 0.825655996799469,
"learning_rate": 9.139302922461218e-05,
"loss": 0.4242,
"step": 4740
},
{
"epoch": 1.3556903317873708,
"grad_norm": 0.6219303607940674,
"learning_rate": 9.133704951241156e-05,
"loss": 0.4023,
"step": 4750
},
{
"epoch": 1.3585444166963967,
"grad_norm": 0.5824173092842102,
"learning_rate": 9.128090560468256e-05,
"loss": 0.4088,
"step": 4760
},
{
"epoch": 1.3613985016054229,
"grad_norm": 0.6774200201034546,
"learning_rate": 9.122459772443527e-05,
"loss": 0.4003,
"step": 4770
},
{
"epoch": 1.3642525865144488,
"grad_norm": 0.8279693722724915,
"learning_rate": 9.116812609533107e-05,
"loss": 0.3765,
"step": 4780
},
{
"epoch": 1.3671066714234748,
"grad_norm": 0.9222491383552551,
"learning_rate": 9.111149094168181e-05,
"loss": 0.4227,
"step": 4790
},
{
"epoch": 1.369960756332501,
"grad_norm": 0.7028577327728271,
"learning_rate": 9.105469248844883e-05,
"loss": 0.4406,
"step": 4800
},
{
"epoch": 1.3728148412415269,
"grad_norm": 0.4629408121109009,
"learning_rate": 9.099773096124213e-05,
"loss": 0.3831,
"step": 4810
},
{
"epoch": 1.375668926150553,
"grad_norm": 0.606282651424408,
"learning_rate": 9.094060658631948e-05,
"loss": 0.3815,
"step": 4820
},
{
"epoch": 1.378523011059579,
"grad_norm": 0.5601187348365784,
"learning_rate": 9.08833195905855e-05,
"loss": 0.3884,
"step": 4830
},
{
"epoch": 1.3813770959686051,
"grad_norm": 0.5658257007598877,
"learning_rate": 9.082587020159073e-05,
"loss": 0.4015,
"step": 4840
},
{
"epoch": 1.384231180877631,
"grad_norm": 0.7560166120529175,
"learning_rate": 9.076825864753074e-05,
"loss": 0.3553,
"step": 4850
},
{
"epoch": 1.387085265786657,
"grad_norm": 0.4828161299228668,
"learning_rate": 9.071048515724531e-05,
"loss": 0.4175,
"step": 4860
},
{
"epoch": 1.3899393506956832,
"grad_norm": 0.8136508464813232,
"learning_rate": 9.06525499602174e-05,
"loss": 0.455,
"step": 4870
},
{
"epoch": 1.3927934356047094,
"grad_norm": 0.41002386808395386,
"learning_rate": 9.059445328657228e-05,
"loss": 0.3639,
"step": 4880
},
{
"epoch": 1.3956475205137353,
"grad_norm": 0.8927071690559387,
"learning_rate": 9.053619536707664e-05,
"loss": 0.436,
"step": 4890
},
{
"epoch": 1.3985016054227613,
"grad_norm": 1.0075316429138184,
"learning_rate": 9.047777643313767e-05,
"loss": 0.4339,
"step": 4900
},
{
"epoch": 1.4013556903317874,
"grad_norm": 0.7862810492515564,
"learning_rate": 9.04191967168021e-05,
"loss": 0.4365,
"step": 4910
},
{
"epoch": 1.4042097752408134,
"grad_norm": 0.9463781714439392,
"learning_rate": 9.036045645075532e-05,
"loss": 0.4307,
"step": 4920
},
{
"epoch": 1.4070638601498395,
"grad_norm": 0.9278760552406311,
"learning_rate": 9.030155586832044e-05,
"loss": 0.3814,
"step": 4930
},
{
"epoch": 1.4099179450588655,
"grad_norm": 0.7348300814628601,
"learning_rate": 9.024249520345738e-05,
"loss": 0.4019,
"step": 4940
},
{
"epoch": 1.4127720299678916,
"grad_norm": 0.5968047380447388,
"learning_rate": 9.01832746907619e-05,
"loss": 0.439,
"step": 4950
},
{
"epoch": 1.4156261148769176,
"grad_norm": 0.527391791343689,
"learning_rate": 9.012389456546473e-05,
"loss": 0.3767,
"step": 4960
},
{
"epoch": 1.4184801997859435,
"grad_norm": 0.5745949149131775,
"learning_rate": 9.006435506343054e-05,
"loss": 0.4271,
"step": 4970
},
{
"epoch": 1.4213342846949697,
"grad_norm": 0.6323638558387756,
"learning_rate": 9.000465642115712e-05,
"loss": 0.3926,
"step": 4980
},
{
"epoch": 1.4241883696039956,
"grad_norm": 0.5993567109107971,
"learning_rate": 8.994479887577436e-05,
"loss": 0.3986,
"step": 4990
},
{
"epoch": 1.4270424545130218,
"grad_norm": 0.43338602781295776,
"learning_rate": 8.988478266504334e-05,
"loss": 0.373,
"step": 5000
},
{
"epoch": 1.4298965394220478,
"grad_norm": 0.5481635928153992,
"learning_rate": 8.982460802735535e-05,
"loss": 0.3705,
"step": 5010
},
{
"epoch": 1.432750624331074,
"grad_norm": 0.7743555903434753,
"learning_rate": 8.976427520173102e-05,
"loss": 0.3915,
"step": 5020
},
{
"epoch": 1.4356047092400999,
"grad_norm": 0.540327787399292,
"learning_rate": 8.970378442781927e-05,
"loss": 0.4166,
"step": 5030
},
{
"epoch": 1.4384587941491258,
"grad_norm": 0.6502352356910706,
"learning_rate": 8.964313594589645e-05,
"loss": 0.4134,
"step": 5040
},
{
"epoch": 1.441312879058152,
"grad_norm": 0.5395691394805908,
"learning_rate": 8.958232999686531e-05,
"loss": 0.4514,
"step": 5050
},
{
"epoch": 1.4441669639671781,
"grad_norm": 0.5091240406036377,
"learning_rate": 8.952136682225409e-05,
"loss": 0.4228,
"step": 5060
},
{
"epoch": 1.447021048876204,
"grad_norm": 0.7273949384689331,
"learning_rate": 8.946024666421557e-05,
"loss": 0.4402,
"step": 5070
},
{
"epoch": 1.44987513378523,
"grad_norm": 0.7087104916572571,
"learning_rate": 8.939896976552605e-05,
"loss": 0.3857,
"step": 5080
},
{
"epoch": 1.4527292186942562,
"grad_norm": 0.6482439041137695,
"learning_rate": 8.933753636958444e-05,
"loss": 0.4174,
"step": 5090
},
{
"epoch": 1.4555833036032821,
"grad_norm": 0.5020700693130493,
"learning_rate": 8.927594672041128e-05,
"loss": 0.4244,
"step": 5100
},
{
"epoch": 1.4584373885123083,
"grad_norm": 0.49530014395713806,
"learning_rate": 8.921420106264778e-05,
"loss": 0.4364,
"step": 5110
},
{
"epoch": 1.4612914734213343,
"grad_norm": 0.5399980545043945,
"learning_rate": 8.915229964155476e-05,
"loss": 0.3733,
"step": 5120
},
{
"epoch": 1.4641455583303604,
"grad_norm": 0.6927512288093567,
"learning_rate": 8.909024270301185e-05,
"loss": 0.4503,
"step": 5130
},
{
"epoch": 1.4669996432393864,
"grad_norm": 0.6361355781555176,
"learning_rate": 8.902803049351635e-05,
"loss": 0.4405,
"step": 5140
},
{
"epoch": 1.4698537281484123,
"grad_norm": 0.6102903485298157,
"learning_rate": 8.896566326018234e-05,
"loss": 0.4239,
"step": 5150
},
{
"epoch": 1.4727078130574385,
"grad_norm": 0.8086116313934326,
"learning_rate": 8.890314125073966e-05,
"loss": 0.4172,
"step": 5160
},
{
"epoch": 1.4755618979664644,
"grad_norm": 0.7844595909118652,
"learning_rate": 8.884046471353295e-05,
"loss": 0.409,
"step": 5170
},
{
"epoch": 1.4784159828754906,
"grad_norm": 0.5432035326957703,
"learning_rate": 8.877763389752064e-05,
"loss": 0.4197,
"step": 5180
},
{
"epoch": 1.4812700677845165,
"grad_norm": 0.6236791014671326,
"learning_rate": 8.871464905227397e-05,
"loss": 0.3988,
"step": 5190
},
{
"epoch": 1.4841241526935427,
"grad_norm": 0.5851837992668152,
"learning_rate": 8.865151042797601e-05,
"loss": 0.3919,
"step": 5200
},
{
"epoch": 1.4869782376025686,
"grad_norm": 0.6688551902770996,
"learning_rate": 8.858821827542067e-05,
"loss": 0.4116,
"step": 5210
},
{
"epoch": 1.4898323225115946,
"grad_norm": 0.574059784412384,
"learning_rate": 8.852477284601166e-05,
"loss": 0.3714,
"step": 5220
},
{
"epoch": 1.4926864074206208,
"grad_norm": 0.695894718170166,
"learning_rate": 8.846117439176158e-05,
"loss": 0.4246,
"step": 5230
},
{
"epoch": 1.495540492329647,
"grad_norm": 0.6500426530838013,
"learning_rate": 8.839742316529079e-05,
"loss": 0.4179,
"step": 5240
},
{
"epoch": 1.4983945772386729,
"grad_norm": 0.4330879747867584,
"learning_rate": 8.833351941982651e-05,
"loss": 0.4041,
"step": 5250
},
{
"epoch": 1.5012486621476988,
"grad_norm": 0.749954879283905,
"learning_rate": 8.826946340920183e-05,
"loss": 0.3884,
"step": 5260
},
{
"epoch": 1.504102747056725,
"grad_norm": 0.5958276987075806,
"learning_rate": 8.820525538785458e-05,
"loss": 0.4179,
"step": 5270
},
{
"epoch": 1.5069568319657511,
"grad_norm": 0.7096928358078003,
"learning_rate": 8.814089561082641e-05,
"loss": 0.4195,
"step": 5280
},
{
"epoch": 1.5098109168747769,
"grad_norm": 0.4753521680831909,
"learning_rate": 8.807638433376183e-05,
"loss": 0.4302,
"step": 5290
},
{
"epoch": 1.512665001783803,
"grad_norm": 1.1176053285598755,
"learning_rate": 8.801172181290702e-05,
"loss": 0.4136,
"step": 5300
},
{
"epoch": 1.5155190866928292,
"grad_norm": 0.5208104848861694,
"learning_rate": 8.794690830510901e-05,
"loss": 0.4324,
"step": 5310
},
{
"epoch": 1.5183731716018551,
"grad_norm": 1.0194308757781982,
"learning_rate": 8.788194406781452e-05,
"loss": 0.4324,
"step": 5320
},
{
"epoch": 1.521227256510881,
"grad_norm": 0.5444753766059875,
"learning_rate": 8.781682935906896e-05,
"loss": 0.4083,
"step": 5330
},
{
"epoch": 1.5240813414199073,
"grad_norm": 0.5094253420829773,
"learning_rate": 8.775156443751549e-05,
"loss": 0.4186,
"step": 5340
},
{
"epoch": 1.5269354263289334,
"grad_norm": 0.45871055126190186,
"learning_rate": 8.768614956239392e-05,
"loss": 0.3758,
"step": 5350
},
{
"epoch": 1.5297895112379594,
"grad_norm": 0.5674312710762024,
"learning_rate": 8.762058499353967e-05,
"loss": 0.447,
"step": 5360
},
{
"epoch": 1.5326435961469853,
"grad_norm": 0.5591029524803162,
"learning_rate": 8.755487099138276e-05,
"loss": 0.4304,
"step": 5370
},
{
"epoch": 1.5354976810560115,
"grad_norm": 0.8171464204788208,
"learning_rate": 8.748900781694677e-05,
"loss": 0.3959,
"step": 5380
},
{
"epoch": 1.5383517659650374,
"grad_norm": 0.8195403814315796,
"learning_rate": 8.742299573184786e-05,
"loss": 0.3785,
"step": 5390
},
{
"epoch": 1.5412058508740634,
"grad_norm": 1.0446261167526245,
"learning_rate": 8.735683499829362e-05,
"loss": 0.4787,
"step": 5400
},
{
"epoch": 1.5440599357830895,
"grad_norm": 0.8570737242698669,
"learning_rate": 8.729052587908213e-05,
"loss": 0.4181,
"step": 5410
},
{
"epoch": 1.5469140206921157,
"grad_norm": 0.6217248439788818,
"learning_rate": 8.722406863760087e-05,
"loss": 0.4379,
"step": 5420
},
{
"epoch": 1.5497681056011416,
"grad_norm": 0.4909728467464447,
"learning_rate": 8.715746353782563e-05,
"loss": 0.4083,
"step": 5430
},
{
"epoch": 1.5526221905101676,
"grad_norm": 0.6557335257530212,
"learning_rate": 8.709071084431957e-05,
"loss": 0.4228,
"step": 5440
},
{
"epoch": 1.5554762754191938,
"grad_norm": 0.7079155445098877,
"learning_rate": 8.702381082223209e-05,
"loss": 0.4686,
"step": 5450
},
{
"epoch": 1.55833036032822,
"grad_norm": 0.6752623319625854,
"learning_rate": 8.69567637372978e-05,
"loss": 0.423,
"step": 5460
},
{
"epoch": 1.5611844452372456,
"grad_norm": 0.49642160534858704,
"learning_rate": 8.688956985583545e-05,
"loss": 0.3938,
"step": 5470
},
{
"epoch": 1.5640385301462718,
"grad_norm": 0.6437910199165344,
"learning_rate": 8.682222944474687e-05,
"loss": 0.3994,
"step": 5480
},
{
"epoch": 1.566892615055298,
"grad_norm": 0.6423130631446838,
"learning_rate": 8.675474277151597e-05,
"loss": 0.4101,
"step": 5490
},
{
"epoch": 1.569746699964324,
"grad_norm": 0.5386946797370911,
"learning_rate": 8.668711010420758e-05,
"loss": 0.3989,
"step": 5500
},
{
"epoch": 1.5726007848733499,
"grad_norm": 0.75031578540802,
"learning_rate": 8.661933171146645e-05,
"loss": 0.4626,
"step": 5510
},
{
"epoch": 1.575454869782376,
"grad_norm": 0.6600304245948792,
"learning_rate": 8.655140786251617e-05,
"loss": 0.4635,
"step": 5520
},
{
"epoch": 1.5783089546914022,
"grad_norm": 0.48744162917137146,
"learning_rate": 8.648333882715814e-05,
"loss": 0.4433,
"step": 5530
},
{
"epoch": 1.5811630396004281,
"grad_norm": 0.5958801507949829,
"learning_rate": 8.641512487577037e-05,
"loss": 0.42,
"step": 5540
},
{
"epoch": 1.584017124509454,
"grad_norm": 0.7981345653533936,
"learning_rate": 8.634676627930659e-05,
"loss": 0.4308,
"step": 5550
},
{
"epoch": 1.5868712094184803,
"grad_norm": 0.5025626420974731,
"learning_rate": 8.627826330929498e-05,
"loss": 0.4404,
"step": 5560
},
{
"epoch": 1.5897252943275062,
"grad_norm": 0.6489295363426208,
"learning_rate": 8.620961623783728e-05,
"loss": 0.4002,
"step": 5570
},
{
"epoch": 1.5925793792365321,
"grad_norm": 0.8079462647438049,
"learning_rate": 8.614082533760754e-05,
"loss": 0.4113,
"step": 5580
},
{
"epoch": 1.5954334641455583,
"grad_norm": 0.7329133749008179,
"learning_rate": 8.60718908818512e-05,
"loss": 0.399,
"step": 5590
},
{
"epoch": 1.5982875490545845,
"grad_norm": 0.43324145674705505,
"learning_rate": 8.600281314438381e-05,
"loss": 0.4258,
"step": 5600
},
{
"epoch": 1.6011416339636104,
"grad_norm": 0.5767898559570312,
"learning_rate": 8.593359239959014e-05,
"loss": 0.4256,
"step": 5610
},
{
"epoch": 1.6039957188726364,
"grad_norm": 0.5638619661331177,
"learning_rate": 8.586422892242297e-05,
"loss": 0.393,
"step": 5620
},
{
"epoch": 1.6068498037816625,
"grad_norm": 0.8214574456214905,
"learning_rate": 8.579472298840204e-05,
"loss": 0.4311,
"step": 5630
},
{
"epoch": 1.6097038886906887,
"grad_norm": 0.538772702217102,
"learning_rate": 8.572507487361292e-05,
"loss": 0.4372,
"step": 5640
},
{
"epoch": 1.6125579735997146,
"grad_norm": 0.7743338346481323,
"learning_rate": 8.565528485470598e-05,
"loss": 0.4366,
"step": 5650
},
{
"epoch": 1.6154120585087406,
"grad_norm": 0.4678172171115875,
"learning_rate": 8.55853532088952e-05,
"loss": 0.423,
"step": 5660
},
{
"epoch": 1.6182661434177668,
"grad_norm": 0.7197970747947693,
"learning_rate": 8.551528021395714e-05,
"loss": 0.4073,
"step": 5670
},
{
"epoch": 1.6211202283267927,
"grad_norm": 0.6703267693519592,
"learning_rate": 8.544506614822983e-05,
"loss": 0.4026,
"step": 5680
},
{
"epoch": 1.6239743132358186,
"grad_norm": 0.4925888478755951,
"learning_rate": 8.537471129061163e-05,
"loss": 0.3971,
"step": 5690
},
{
"epoch": 1.6268283981448448,
"grad_norm": 0.5355518460273743,
"learning_rate": 8.530421592056017e-05,
"loss": 0.41,
"step": 5700
},
{
"epoch": 1.629682483053871,
"grad_norm": 0.8152324557304382,
"learning_rate": 8.523358031809116e-05,
"loss": 0.428,
"step": 5710
},
{
"epoch": 1.632536567962897,
"grad_norm": 0.536670446395874,
"learning_rate": 8.51628047637774e-05,
"loss": 0.4139,
"step": 5720
},
{
"epoch": 1.6353906528719229,
"grad_norm": 0.5895646810531616,
"learning_rate": 8.509188953874749e-05,
"loss": 0.4086,
"step": 5730
},
{
"epoch": 1.638244737780949,
"grad_norm": 0.551447331905365,
"learning_rate": 8.502083492468495e-05,
"loss": 0.3999,
"step": 5740
},
{
"epoch": 1.641098822689975,
"grad_norm": 0.5315040946006775,
"learning_rate": 8.494964120382688e-05,
"loss": 0.4092,
"step": 5750
},
{
"epoch": 1.643952907599001,
"grad_norm": 0.604598879814148,
"learning_rate": 8.487830865896294e-05,
"loss": 0.4035,
"step": 5760
},
{
"epoch": 1.646806992508027,
"grad_norm": 0.6488718390464783,
"learning_rate": 8.480683757343422e-05,
"loss": 0.4131,
"step": 5770
},
{
"epoch": 1.6496610774170533,
"grad_norm": 0.8826755881309509,
"learning_rate": 8.473522823113213e-05,
"loss": 0.4191,
"step": 5780
},
{
"epoch": 1.6525151623260792,
"grad_norm": 0.586421012878418,
"learning_rate": 8.466348091649726e-05,
"loss": 0.4388,
"step": 5790
},
{
"epoch": 1.6553692472351051,
"grad_norm": 0.6341783404350281,
"learning_rate": 8.459159591451815e-05,
"loss": 0.4084,
"step": 5800
},
{
"epoch": 1.6582233321441313,
"grad_norm": 0.6457330584526062,
"learning_rate": 8.451957351073038e-05,
"loss": 0.4073,
"step": 5810
},
{
"epoch": 1.6610774170531575,
"grad_norm": 0.5750988125801086,
"learning_rate": 8.444741399121522e-05,
"loss": 0.4185,
"step": 5820
},
{
"epoch": 1.6639315019621834,
"grad_norm": 0.46606963872909546,
"learning_rate": 8.437511764259862e-05,
"loss": 0.3942,
"step": 5830
},
{
"epoch": 1.6667855868712094,
"grad_norm": 0.6799011826515198,
"learning_rate": 8.430268475205e-05,
"loss": 0.4334,
"step": 5840
},
{
"epoch": 1.6696396717802355,
"grad_norm": 0.45308125019073486,
"learning_rate": 8.423011560728118e-05,
"loss": 0.3851,
"step": 5850
},
{
"epoch": 1.6724937566892615,
"grad_norm": 0.49569767713546753,
"learning_rate": 8.415741049654512e-05,
"loss": 0.417,
"step": 5860
},
{
"epoch": 1.6753478415982874,
"grad_norm": 0.68571537733078,
"learning_rate": 8.408456970863495e-05,
"loss": 0.3584,
"step": 5870
},
{
"epoch": 1.6782019265073136,
"grad_norm": 0.64979088306427,
"learning_rate": 8.401159353288268e-05,
"loss": 0.4009,
"step": 5880
},
{
"epoch": 1.6810560114163398,
"grad_norm": 0.5367964506149292,
"learning_rate": 8.393848225915809e-05,
"loss": 0.4307,
"step": 5890
},
{
"epoch": 1.6839100963253657,
"grad_norm": 0.5272241830825806,
"learning_rate": 8.386523617786757e-05,
"loss": 0.4088,
"step": 5900
},
{
"epoch": 1.6867641812343916,
"grad_norm": 0.6130824685096741,
"learning_rate": 8.379185557995302e-05,
"loss": 0.3827,
"step": 5910
},
{
"epoch": 1.6896182661434178,
"grad_norm": 0.4380579888820648,
"learning_rate": 8.371834075689067e-05,
"loss": 0.3958,
"step": 5920
},
{
"epoch": 1.6924723510524438,
"grad_norm": 0.5438313484191895,
"learning_rate": 8.364469200068981e-05,
"loss": 0.4141,
"step": 5930
},
{
"epoch": 1.6953264359614697,
"grad_norm": 0.5417528748512268,
"learning_rate": 8.357090960389186e-05,
"loss": 0.41,
"step": 5940
},
{
"epoch": 1.6981805208704959,
"grad_norm": 0.4851051867008209,
"learning_rate": 8.349699385956894e-05,
"loss": 0.4063,
"step": 5950
},
{
"epoch": 1.701034605779522,
"grad_norm": 0.7028306126594543,
"learning_rate": 8.3422945061323e-05,
"loss": 0.3612,
"step": 5960
},
{
"epoch": 1.703888690688548,
"grad_norm": 0.6762276291847229,
"learning_rate": 8.334876350328433e-05,
"loss": 0.4122,
"step": 5970
},
{
"epoch": 1.706742775597574,
"grad_norm": 0.8307727575302124,
"learning_rate": 8.327444948011067e-05,
"loss": 0.4257,
"step": 5980
},
{
"epoch": 1.7095968605066,
"grad_norm": 0.6955322623252869,
"learning_rate": 8.32000032869859e-05,
"loss": 0.4208,
"step": 5990
},
{
"epoch": 1.7124509454156263,
"grad_norm": 0.5121544003486633,
"learning_rate": 8.312542521961884e-05,
"loss": 0.4031,
"step": 6000
},
{
"epoch": 1.7153050303246522,
"grad_norm": 0.5445939898490906,
"learning_rate": 8.305071557424222e-05,
"loss": 0.3946,
"step": 6010
},
{
"epoch": 1.7181591152336781,
"grad_norm": 0.5593860149383545,
"learning_rate": 8.297587464761136e-05,
"loss": 0.3964,
"step": 6020
},
{
"epoch": 1.7210132001427043,
"grad_norm": 0.6780118942260742,
"learning_rate": 8.290090273700304e-05,
"loss": 0.4243,
"step": 6030
},
{
"epoch": 1.7238672850517303,
"grad_norm": 0.8964153528213501,
"learning_rate": 8.282580014021435e-05,
"loss": 0.4214,
"step": 6040
},
{
"epoch": 1.7267213699607562,
"grad_norm": 0.658841609954834,
"learning_rate": 8.275056715556144e-05,
"loss": 0.3939,
"step": 6050
},
{
"epoch": 1.7295754548697824,
"grad_norm": 0.8680906891822815,
"learning_rate": 8.267520408187843e-05,
"loss": 0.4127,
"step": 6060
},
{
"epoch": 1.7324295397788085,
"grad_norm": 0.6496133804321289,
"learning_rate": 8.259971121851616e-05,
"loss": 0.4288,
"step": 6070
},
{
"epoch": 1.7352836246878345,
"grad_norm": 0.4331784248352051,
"learning_rate": 8.252408886534092e-05,
"loss": 0.4265,
"step": 6080
},
{
"epoch": 1.7381377095968604,
"grad_norm": 0.568143904209137,
"learning_rate": 8.244833732273349e-05,
"loss": 0.4333,
"step": 6090
},
{
"epoch": 1.7409917945058866,
"grad_norm": 0.5459315776824951,
"learning_rate": 8.237245689158768e-05,
"loss": 0.4249,
"step": 6100
},
{
"epoch": 1.7438458794149128,
"grad_norm": 0.9281138777732849,
"learning_rate": 8.229644787330936e-05,
"loss": 0.4171,
"step": 6110
},
{
"epoch": 1.7466999643239385,
"grad_norm": 0.7226198315620422,
"learning_rate": 8.22203105698151e-05,
"loss": 0.3746,
"step": 6120
},
{
"epoch": 1.7495540492329646,
"grad_norm": 0.4380105435848236,
"learning_rate": 8.214404528353104e-05,
"loss": 0.3939,
"step": 6130
},
{
"epoch": 1.7524081341419908,
"grad_norm": 0.5727590322494507,
"learning_rate": 8.206765231739168e-05,
"loss": 0.4035,
"step": 6140
},
{
"epoch": 1.7552622190510168,
"grad_norm": 0.4349416494369507,
"learning_rate": 8.199113197483874e-05,
"loss": 0.3843,
"step": 6150
},
{
"epoch": 1.7581163039600427,
"grad_norm": 0.5138025879859924,
"learning_rate": 8.191448455981982e-05,
"loss": 0.3897,
"step": 6160
},
{
"epoch": 1.7609703888690689,
"grad_norm": 0.6387166380882263,
"learning_rate": 8.18377103767873e-05,
"loss": 0.384,
"step": 6170
},
{
"epoch": 1.763824473778095,
"grad_norm": 0.5494593977928162,
"learning_rate": 8.176080973069706e-05,
"loss": 0.3952,
"step": 6180
},
{
"epoch": 1.766678558687121,
"grad_norm": 0.6428438425064087,
"learning_rate": 8.168378292700738e-05,
"loss": 0.3994,
"step": 6190
},
{
"epoch": 1.769532643596147,
"grad_norm": 1.2720189094543457,
"learning_rate": 8.160663027167756e-05,
"loss": 0.416,
"step": 6200
},
{
"epoch": 1.772386728505173,
"grad_norm": 0.539422869682312,
"learning_rate": 8.152935207116688e-05,
"loss": 0.3789,
"step": 6210
},
{
"epoch": 1.775240813414199,
"grad_norm": 0.5536684393882751,
"learning_rate": 8.145194863243328e-05,
"loss": 0.4329,
"step": 6220
},
{
"epoch": 1.778094898323225,
"grad_norm": 0.7319250702857971,
"learning_rate": 8.137442026293212e-05,
"loss": 0.4298,
"step": 6230
},
{
"epoch": 1.7809489832322511,
"grad_norm": 0.6316890120506287,
"learning_rate": 8.129676727061502e-05,
"loss": 0.4081,
"step": 6240
},
{
"epoch": 1.7838030681412773,
"grad_norm": 1.2765876054763794,
"learning_rate": 8.121898996392863e-05,
"loss": 0.4158,
"step": 6250
},
{
"epoch": 1.7866571530503033,
"grad_norm": 0.48435863852500916,
"learning_rate": 8.114108865181338e-05,
"loss": 0.412,
"step": 6260
},
{
"epoch": 1.7895112379593292,
"grad_norm": 0.7326620817184448,
"learning_rate": 8.106306364370228e-05,
"loss": 0.424,
"step": 6270
},
{
"epoch": 1.7923653228683554,
"grad_norm": 0.7231389284133911,
"learning_rate": 8.098491524951966e-05,
"loss": 0.4133,
"step": 6280
},
{
"epoch": 1.7952194077773815,
"grad_norm": 0.6374312043190002,
"learning_rate": 8.090664377967993e-05,
"loss": 0.461,
"step": 6290
},
{
"epoch": 1.7980734926864073,
"grad_norm": 0.5649964213371277,
"learning_rate": 8.082824954508642e-05,
"loss": 0.3768,
"step": 6300
},
{
"epoch": 1.8009275775954334,
"grad_norm": 0.5286874175071716,
"learning_rate": 8.074973285713008e-05,
"loss": 0.4139,
"step": 6310
},
{
"epoch": 1.8037816625044596,
"grad_norm": 0.5340945720672607,
"learning_rate": 8.067109402768822e-05,
"loss": 0.4288,
"step": 6320
},
{
"epoch": 1.8066357474134855,
"grad_norm": 0.5859851837158203,
"learning_rate": 8.059233336912335e-05,
"loss": 0.3793,
"step": 6330
},
{
"epoch": 1.8094898323225115,
"grad_norm": 0.6643693447113037,
"learning_rate": 8.051345119428191e-05,
"loss": 0.3761,
"step": 6340
},
{
"epoch": 1.8123439172315376,
"grad_norm": 0.9013630151748657,
"learning_rate": 8.043444781649298e-05,
"loss": 0.4003,
"step": 6350
},
{
"epoch": 1.8151980021405638,
"grad_norm": 0.514897882938385,
"learning_rate": 8.035532354956707e-05,
"loss": 0.3897,
"step": 6360
},
{
"epoch": 1.8180520870495898,
"grad_norm": 0.8518983125686646,
"learning_rate": 8.027607870779495e-05,
"loss": 0.3865,
"step": 6370
},
{
"epoch": 1.8209061719586157,
"grad_norm": 0.5759355425834656,
"learning_rate": 8.019671360594621e-05,
"loss": 0.4141,
"step": 6380
},
{
"epoch": 1.8237602568676419,
"grad_norm": 0.5349940061569214,
"learning_rate": 8.011722855926822e-05,
"loss": 0.4281,
"step": 6390
},
{
"epoch": 1.8266143417766678,
"grad_norm": 0.5943355560302734,
"learning_rate": 8.003762388348477e-05,
"loss": 0.3837,
"step": 6400
},
{
"epoch": 1.8294684266856938,
"grad_norm": 0.531690776348114,
"learning_rate": 7.995789989479477e-05,
"loss": 0.3673,
"step": 6410
},
{
"epoch": 1.83232251159472,
"grad_norm": 0.6260749101638794,
"learning_rate": 7.987805690987114e-05,
"loss": 0.4121,
"step": 6420
},
{
"epoch": 1.835176596503746,
"grad_norm": 0.5923037528991699,
"learning_rate": 7.979809524585937e-05,
"loss": 0.4019,
"step": 6430
},
{
"epoch": 1.838030681412772,
"grad_norm": 0.693532407283783,
"learning_rate": 7.971801522037647e-05,
"loss": 0.4394,
"step": 6440
},
{
"epoch": 1.840884766321798,
"grad_norm": 0.9910332560539246,
"learning_rate": 7.963781715150951e-05,
"loss": 0.4668,
"step": 6450
},
{
"epoch": 1.8437388512308241,
"grad_norm": 0.5126996636390686,
"learning_rate": 7.955750135781447e-05,
"loss": 0.4084,
"step": 6460
},
{
"epoch": 1.8465929361398503,
"grad_norm": 0.6543841361999512,
"learning_rate": 7.947706815831497e-05,
"loss": 0.3998,
"step": 6470
},
{
"epoch": 1.849447021048876,
"grad_norm": 0.5038157105445862,
"learning_rate": 7.939651787250091e-05,
"loss": 0.4045,
"step": 6480
},
{
"epoch": 1.8523011059579022,
"grad_norm": 0.5962367057800293,
"learning_rate": 7.931585082032737e-05,
"loss": 0.4243,
"step": 6490
},
{
"epoch": 1.8551551908669284,
"grad_norm": 0.5447099804878235,
"learning_rate": 7.923506732221314e-05,
"loss": 0.4105,
"step": 6500
},
{
"epoch": 1.8580092757759543,
"grad_norm": 0.6572823524475098,
"learning_rate": 7.91541676990396e-05,
"loss": 0.4253,
"step": 6510
},
{
"epoch": 1.8608633606849803,
"grad_norm": 0.48391202092170715,
"learning_rate": 7.907315227214939e-05,
"loss": 0.4065,
"step": 6520
},
{
"epoch": 1.8637174455940064,
"grad_norm": 0.599360466003418,
"learning_rate": 7.899202136334514e-05,
"loss": 0.4022,
"step": 6530
},
{
"epoch": 1.8665715305030326,
"grad_norm": 0.6030710935592651,
"learning_rate": 7.891077529488817e-05,
"loss": 0.3849,
"step": 6540
},
{
"epoch": 1.8694256154120585,
"grad_norm": 0.6563034653663635,
"learning_rate": 7.882941438949723e-05,
"loss": 0.4047,
"step": 6550
},
{
"epoch": 1.8722797003210845,
"grad_norm": 0.7676080465316772,
"learning_rate": 7.874793897034722e-05,
"loss": 0.416,
"step": 6560
},
{
"epoch": 1.8751337852301107,
"grad_norm": 0.9442748427391052,
"learning_rate": 7.866634936106792e-05,
"loss": 0.3855,
"step": 6570
},
{
"epoch": 1.8779878701391366,
"grad_norm": 0.5683465600013733,
"learning_rate": 7.858464588574264e-05,
"loss": 0.3895,
"step": 6580
},
{
"epoch": 1.8808419550481625,
"grad_norm": 0.6074008941650391,
"learning_rate": 7.850282886890702e-05,
"loss": 0.3592,
"step": 6590
},
{
"epoch": 1.8836960399571887,
"grad_norm": 0.44344863295555115,
"learning_rate": 7.84208986355477e-05,
"loss": 0.3667,
"step": 6600
},
{
"epoch": 1.8865501248662149,
"grad_norm": 0.5034730434417725,
"learning_rate": 7.8338855511101e-05,
"loss": 0.3899,
"step": 6610
},
{
"epoch": 1.8894042097752408,
"grad_norm": 0.5974074006080627,
"learning_rate": 7.825669982145169e-05,
"loss": 0.4063,
"step": 6620
},
{
"epoch": 1.8922582946842668,
"grad_norm": 0.6027740836143494,
"learning_rate": 7.817443189293162e-05,
"loss": 0.378,
"step": 6630
},
{
"epoch": 1.895112379593293,
"grad_norm": 0.6006380319595337,
"learning_rate": 7.809205205231851e-05,
"loss": 0.4143,
"step": 6640
},
{
"epoch": 1.897966464502319,
"grad_norm": 0.4824349284172058,
"learning_rate": 7.800956062683458e-05,
"loss": 0.4059,
"step": 6650
},
{
"epoch": 1.900820549411345,
"grad_norm": 0.4044560194015503,
"learning_rate": 7.792695794414528e-05,
"loss": 0.3845,
"step": 6660
},
{
"epoch": 1.903674634320371,
"grad_norm": 0.5848098397254944,
"learning_rate": 7.784424433235803e-05,
"loss": 0.4118,
"step": 6670
},
{
"epoch": 1.9065287192293972,
"grad_norm": 0.7342092990875244,
"learning_rate": 7.776142012002077e-05,
"loss": 0.4457,
"step": 6680
},
{
"epoch": 1.909382804138423,
"grad_norm": 0.6541807651519775,
"learning_rate": 7.767848563612087e-05,
"loss": 0.4013,
"step": 6690
},
{
"epoch": 1.912236889047449,
"grad_norm": 0.6325574517250061,
"learning_rate": 7.759544121008368e-05,
"loss": 0.3985,
"step": 6700
},
{
"epoch": 1.9150909739564752,
"grad_norm": 0.6047912836074829,
"learning_rate": 7.75122871717712e-05,
"loss": 0.3748,
"step": 6710
},
{
"epoch": 1.9179450588655014,
"grad_norm": 0.8499783277511597,
"learning_rate": 7.742902385148087e-05,
"loss": 0.4232,
"step": 6720
},
{
"epoch": 1.9207991437745273,
"grad_norm": 0.6166467070579529,
"learning_rate": 7.734565157994423e-05,
"loss": 0.3894,
"step": 6730
},
{
"epoch": 1.9236532286835533,
"grad_norm": 0.6198390126228333,
"learning_rate": 7.726217068832551e-05,
"loss": 0.4304,
"step": 6740
},
{
"epoch": 1.9265073135925794,
"grad_norm": 0.5126845240592957,
"learning_rate": 7.717858150822048e-05,
"loss": 0.3671,
"step": 6750
},
{
"epoch": 1.9293613985016054,
"grad_norm": 0.6374401450157166,
"learning_rate": 7.7094884371655e-05,
"loss": 0.4355,
"step": 6760
},
{
"epoch": 1.9322154834106313,
"grad_norm": 0.5290126204490662,
"learning_rate": 7.701107961108374e-05,
"loss": 0.3938,
"step": 6770
},
{
"epoch": 1.9350695683196575,
"grad_norm": 0.5920684933662415,
"learning_rate": 7.692716755938884e-05,
"loss": 0.4078,
"step": 6780
},
{
"epoch": 1.9379236532286837,
"grad_norm": 0.8244432210922241,
"learning_rate": 7.684314854987873e-05,
"loss": 0.4081,
"step": 6790
},
{
"epoch": 1.9407777381377096,
"grad_norm": 0.7835015654563904,
"learning_rate": 7.675902291628653e-05,
"loss": 0.4596,
"step": 6800
},
{
"epoch": 1.9436318230467355,
"grad_norm": 0.7511467337608337,
"learning_rate": 7.6674790992769e-05,
"loss": 0.4151,
"step": 6810
},
{
"epoch": 1.9464859079557617,
"grad_norm": 0.7354687452316284,
"learning_rate": 7.659045311390505e-05,
"loss": 0.3712,
"step": 6820
},
{
"epoch": 1.9493399928647879,
"grad_norm": 0.7676160931587219,
"learning_rate": 7.650600961469445e-05,
"loss": 0.4129,
"step": 6830
},
{
"epoch": 1.9521940777738138,
"grad_norm": 0.5947080850601196,
"learning_rate": 7.642146083055653e-05,
"loss": 0.4174,
"step": 6840
},
{
"epoch": 1.9550481626828398,
"grad_norm": 0.7656364440917969,
"learning_rate": 7.633680709732878e-05,
"loss": 0.4157,
"step": 6850
},
{
"epoch": 1.957902247591866,
"grad_norm": 0.6544439792633057,
"learning_rate": 7.625204875126564e-05,
"loss": 0.3855,
"step": 6860
},
{
"epoch": 1.9607563325008919,
"grad_norm": 0.8954373002052307,
"learning_rate": 7.616718612903702e-05,
"loss": 0.4537,
"step": 6870
},
{
"epoch": 1.9636104174099178,
"grad_norm": 0.9667245745658875,
"learning_rate": 7.608221956772704e-05,
"loss": 0.3826,
"step": 6880
},
{
"epoch": 1.966464502318944,
"grad_norm": 0.584932804107666,
"learning_rate": 7.599714940483267e-05,
"loss": 0.446,
"step": 6890
},
{
"epoch": 1.9693185872279702,
"grad_norm": 0.568055272102356,
"learning_rate": 7.591197597826243e-05,
"loss": 0.4048,
"step": 6900
},
{
"epoch": 1.972172672136996,
"grad_norm": 0.5002406239509583,
"learning_rate": 7.582669962633498e-05,
"loss": 0.4238,
"step": 6910
},
{
"epoch": 1.975026757046022,
"grad_norm": 1.0992151498794556,
"learning_rate": 7.574132068777781e-05,
"loss": 0.4496,
"step": 6920
},
{
"epoch": 1.9778808419550482,
"grad_norm": 0.5405780076980591,
"learning_rate": 7.565583950172595e-05,
"loss": 0.3914,
"step": 6930
},
{
"epoch": 1.9807349268640742,
"grad_norm": 0.6248615980148315,
"learning_rate": 7.55702564077205e-05,
"loss": 0.4425,
"step": 6940
},
{
"epoch": 1.9835890117731,
"grad_norm": 0.5931448340415955,
"learning_rate": 7.54845717457074e-05,
"loss": 0.3759,
"step": 6950
},
{
"epoch": 1.9864430966821263,
"grad_norm": 0.6648136377334595,
"learning_rate": 7.539878585603599e-05,
"loss": 0.4167,
"step": 6960
},
{
"epoch": 1.9892971815911524,
"grad_norm": 0.6499849557876587,
"learning_rate": 7.531289907945773e-05,
"loss": 0.4238,
"step": 6970
},
{
"epoch": 1.9921512665001784,
"grad_norm": 0.5630407929420471,
"learning_rate": 7.522691175712482e-05,
"loss": 0.405,
"step": 6980
},
{
"epoch": 1.9950053514092043,
"grad_norm": 0.6160908341407776,
"learning_rate": 7.514082423058879e-05,
"loss": 0.4276,
"step": 6990
},
{
"epoch": 1.9978594363182305,
"grad_norm": 0.6159669160842896,
"learning_rate": 7.505463684179923e-05,
"loss": 0.4513,
"step": 7000
}
],
"logging_steps": 10,
"max_steps": 17515,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.7985100656027894e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}