{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.999785943631823,
"eval_steps": 500,
"global_step": 3503,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0028540849090260435,
"grad_norm": 7.177610874176025,
"learning_rate": 5.707762557077626e-07,
"loss": 5.7328,
"step": 10
},
{
"epoch": 0.005708169818052087,
"grad_norm": 7.953944206237793,
"learning_rate": 1.1415525114155251e-06,
"loss": 5.4553,
"step": 20
},
{
"epoch": 0.00856225472707813,
"grad_norm": 7.924602031707764,
"learning_rate": 1.7123287671232877e-06,
"loss": 5.6662,
"step": 30
},
{
"epoch": 0.011416339636104174,
"grad_norm": 8.706378936767578,
"learning_rate": 2.2831050228310503e-06,
"loss": 5.503,
"step": 40
},
{
"epoch": 0.014270424545130217,
"grad_norm": 8.828363418579102,
"learning_rate": 2.853881278538813e-06,
"loss": 5.2946,
"step": 50
},
{
"epoch": 0.01712450945415626,
"grad_norm": 11.124424934387207,
"learning_rate": 3.4246575342465754e-06,
"loss": 4.9995,
"step": 60
},
{
"epoch": 0.019978594363182306,
"grad_norm": 16.35590934753418,
"learning_rate": 3.995433789954338e-06,
"loss": 4.4733,
"step": 70
},
{
"epoch": 0.022832679272208348,
"grad_norm": 9.050875663757324,
"learning_rate": 4.566210045662101e-06,
"loss": 3.389,
"step": 80
},
{
"epoch": 0.025686764181234393,
"grad_norm": 10.061305046081543,
"learning_rate": 5.136986301369863e-06,
"loss": 2.475,
"step": 90
},
{
"epoch": 0.028540849090260435,
"grad_norm": 4.466187000274658,
"learning_rate": 5.707762557077626e-06,
"loss": 1.4959,
"step": 100
},
{
"epoch": 0.03139493399928648,
"grad_norm": 3.469721794128418,
"learning_rate": 6.278538812785388e-06,
"loss": 1.1382,
"step": 110
},
{
"epoch": 0.03424901890831252,
"grad_norm": 5.091841220855713,
"learning_rate": 6.849315068493151e-06,
"loss": 0.8312,
"step": 120
},
{
"epoch": 0.03710310381733856,
"grad_norm": 2.3277525901794434,
"learning_rate": 7.4200913242009134e-06,
"loss": 0.7045,
"step": 130
},
{
"epoch": 0.03995718872636461,
"grad_norm": 2.6082115173339844,
"learning_rate": 7.990867579908676e-06,
"loss": 0.7053,
"step": 140
},
{
"epoch": 0.042811273635390654,
"grad_norm": 2.5840566158294678,
"learning_rate": 8.561643835616438e-06,
"loss": 0.5612,
"step": 150
},
{
"epoch": 0.045665358544416695,
"grad_norm": 1.6896095275878906,
"learning_rate": 9.132420091324201e-06,
"loss": 0.536,
"step": 160
},
{
"epoch": 0.04851944345344274,
"grad_norm": 4.644432067871094,
"learning_rate": 9.703196347031963e-06,
"loss": 0.4792,
"step": 170
},
{
"epoch": 0.051373528362468786,
"grad_norm": 2.0224084854125977,
"learning_rate": 1.0273972602739726e-05,
"loss": 0.4958,
"step": 180
},
{
"epoch": 0.05422761327149483,
"grad_norm": 2.7462992668151855,
"learning_rate": 1.0844748858447488e-05,
"loss": 0.5412,
"step": 190
},
{
"epoch": 0.05708169818052087,
"grad_norm": 3.044191598892212,
"learning_rate": 1.1415525114155251e-05,
"loss": 0.5006,
"step": 200
},
{
"epoch": 0.05993578308954691,
"grad_norm": 2.856959819793701,
"learning_rate": 1.1986301369863013e-05,
"loss": 0.4759,
"step": 210
},
{
"epoch": 0.06278986799857296,
"grad_norm": 1.736061453819275,
"learning_rate": 1.2557077625570777e-05,
"loss": 0.4691,
"step": 220
},
{
"epoch": 0.065643952907599,
"grad_norm": 1.445332646369934,
"learning_rate": 1.312785388127854e-05,
"loss": 0.4628,
"step": 230
},
{
"epoch": 0.06849803781662504,
"grad_norm": 2.229682683944702,
"learning_rate": 1.3698630136986302e-05,
"loss": 0.4981,
"step": 240
},
{
"epoch": 0.07135212272565108,
"grad_norm": 4.1906232833862305,
"learning_rate": 1.4269406392694065e-05,
"loss": 0.4352,
"step": 250
},
{
"epoch": 0.07420620763467713,
"grad_norm": 3.204361915588379,
"learning_rate": 1.4840182648401827e-05,
"loss": 0.4872,
"step": 260
},
{
"epoch": 0.07706029254370317,
"grad_norm": 2.531733989715576,
"learning_rate": 1.541095890410959e-05,
"loss": 0.4652,
"step": 270
},
{
"epoch": 0.07991437745272922,
"grad_norm": 2.0460851192474365,
"learning_rate": 1.5981735159817352e-05,
"loss": 0.5005,
"step": 280
},
{
"epoch": 0.08276846236175527,
"grad_norm": 3.3922574520111084,
"learning_rate": 1.6552511415525115e-05,
"loss": 0.4896,
"step": 290
},
{
"epoch": 0.08562254727078131,
"grad_norm": 2.1291298866271973,
"learning_rate": 1.7123287671232875e-05,
"loss": 0.4396,
"step": 300
},
{
"epoch": 0.08847663217980735,
"grad_norm": 3.9003682136535645,
"learning_rate": 1.769406392694064e-05,
"loss": 0.467,
"step": 310
},
{
"epoch": 0.09133071708883339,
"grad_norm": 3.01649808883667,
"learning_rate": 1.8264840182648402e-05,
"loss": 0.4752,
"step": 320
},
{
"epoch": 0.09418480199785943,
"grad_norm": 1.4664050340652466,
"learning_rate": 1.8835616438356166e-05,
"loss": 0.4519,
"step": 330
},
{
"epoch": 0.09703888690688547,
"grad_norm": 1.8165019750595093,
"learning_rate": 1.9406392694063926e-05,
"loss": 0.4778,
"step": 340
},
{
"epoch": 0.09989297181591153,
"grad_norm": 1.9969810247421265,
"learning_rate": 1.997716894977169e-05,
"loss": 0.446,
"step": 350
},
{
"epoch": 0.10274705672493757,
"grad_norm": 1.4795584678649902,
"learning_rate": 2.0547945205479453e-05,
"loss": 0.4847,
"step": 360
},
{
"epoch": 0.10560114163396361,
"grad_norm": 1.3624811172485352,
"learning_rate": 2.1118721461187216e-05,
"loss": 0.4902,
"step": 370
},
{
"epoch": 0.10845522654298966,
"grad_norm": 2.039226770401001,
"learning_rate": 2.1689497716894976e-05,
"loss": 0.4595,
"step": 380
},
{
"epoch": 0.1113093114520157,
"grad_norm": 1.597818374633789,
"learning_rate": 2.226027397260274e-05,
"loss": 0.4481,
"step": 390
},
{
"epoch": 0.11416339636104174,
"grad_norm": 1.184199571609497,
"learning_rate": 2.2831050228310503e-05,
"loss": 0.4659,
"step": 400
},
{
"epoch": 0.11701748127006778,
"grad_norm": 2.6335010528564453,
"learning_rate": 2.3401826484018266e-05,
"loss": 0.4932,
"step": 410
},
{
"epoch": 0.11987156617909382,
"grad_norm": 1.4489330053329468,
"learning_rate": 2.3972602739726026e-05,
"loss": 0.4646,
"step": 420
},
{
"epoch": 0.12272565108811988,
"grad_norm": 2.0063445568084717,
"learning_rate": 2.454337899543379e-05,
"loss": 0.4544,
"step": 430
},
{
"epoch": 0.12557973599714592,
"grad_norm": 2.1495394706726074,
"learning_rate": 2.5114155251141553e-05,
"loss": 0.4479,
"step": 440
},
{
"epoch": 0.12843382090617195,
"grad_norm": 1.3138567209243774,
"learning_rate": 2.568493150684932e-05,
"loss": 0.4727,
"step": 450
},
{
"epoch": 0.131287905815198,
"grad_norm": 1.474777340888977,
"learning_rate": 2.625570776255708e-05,
"loss": 0.4613,
"step": 460
},
{
"epoch": 0.13414199072422406,
"grad_norm": 2.4200029373168945,
"learning_rate": 2.682648401826484e-05,
"loss": 0.4794,
"step": 470
},
{
"epoch": 0.1369960756332501,
"grad_norm": 1.476733922958374,
"learning_rate": 2.7397260273972603e-05,
"loss": 0.4764,
"step": 480
},
{
"epoch": 0.13985016054227614,
"grad_norm": 4.004658222198486,
"learning_rate": 2.796803652968037e-05,
"loss": 0.4796,
"step": 490
},
{
"epoch": 0.14270424545130217,
"grad_norm": 1.8049726486206055,
"learning_rate": 2.853881278538813e-05,
"loss": 0.4883,
"step": 500
},
{
"epoch": 0.14555833036032823,
"grad_norm": 1.7194474935531616,
"learning_rate": 2.910958904109589e-05,
"loss": 0.4749,
"step": 510
},
{
"epoch": 0.14841241526935425,
"grad_norm": 1.2992069721221924,
"learning_rate": 2.9680365296803654e-05,
"loss": 0.448,
"step": 520
},
{
"epoch": 0.1512665001783803,
"grad_norm": 1.0582475662231445,
"learning_rate": 3.025114155251142e-05,
"loss": 0.4781,
"step": 530
},
{
"epoch": 0.15412058508740634,
"grad_norm": 2.1239373683929443,
"learning_rate": 3.082191780821918e-05,
"loss": 0.5293,
"step": 540
},
{
"epoch": 0.1569746699964324,
"grad_norm": 1.3766371011734009,
"learning_rate": 3.1392694063926944e-05,
"loss": 0.4626,
"step": 550
},
{
"epoch": 0.15982875490545845,
"grad_norm": 1.2140746116638184,
"learning_rate": 3.1963470319634704e-05,
"loss": 0.4471,
"step": 560
},
{
"epoch": 0.16268283981448448,
"grad_norm": 1.7624636888504028,
"learning_rate": 3.253424657534247e-05,
"loss": 0.4691,
"step": 570
},
{
"epoch": 0.16553692472351053,
"grad_norm": 3.22637939453125,
"learning_rate": 3.310502283105023e-05,
"loss": 0.5514,
"step": 580
},
{
"epoch": 0.16839100963253656,
"grad_norm": 3.3661112785339355,
"learning_rate": 3.367579908675799e-05,
"loss": 0.5277,
"step": 590
},
{
"epoch": 0.17124509454156261,
"grad_norm": 2.009028673171997,
"learning_rate": 3.424657534246575e-05,
"loss": 0.4961,
"step": 600
},
{
"epoch": 0.17409917945058864,
"grad_norm": 1.145951271057129,
"learning_rate": 3.481735159817352e-05,
"loss": 0.484,
"step": 610
},
{
"epoch": 0.1769532643596147,
"grad_norm": 1.4422398805618286,
"learning_rate": 3.538812785388128e-05,
"loss": 0.4913,
"step": 620
},
{
"epoch": 0.17980734926864075,
"grad_norm": 1.5047334432601929,
"learning_rate": 3.5958904109589045e-05,
"loss": 0.468,
"step": 630
},
{
"epoch": 0.18266143417766678,
"grad_norm": 1.358590841293335,
"learning_rate": 3.6529680365296805e-05,
"loss": 0.4549,
"step": 640
},
{
"epoch": 0.18551551908669284,
"grad_norm": 1.2067798376083374,
"learning_rate": 3.710045662100457e-05,
"loss": 0.4726,
"step": 650
},
{
"epoch": 0.18836960399571887,
"grad_norm": 1.3069053888320923,
"learning_rate": 3.767123287671233e-05,
"loss": 0.4998,
"step": 660
},
{
"epoch": 0.19122368890474492,
"grad_norm": 1.4003655910491943,
"learning_rate": 3.824200913242009e-05,
"loss": 0.4396,
"step": 670
},
{
"epoch": 0.19407777381377095,
"grad_norm": 1.6738018989562988,
"learning_rate": 3.881278538812785e-05,
"loss": 0.4782,
"step": 680
},
{
"epoch": 0.196931858722797,
"grad_norm": 1.3846429586410522,
"learning_rate": 3.938356164383562e-05,
"loss": 0.4873,
"step": 690
},
{
"epoch": 0.19978594363182306,
"grad_norm": 1.5841200351715088,
"learning_rate": 3.995433789954338e-05,
"loss": 0.4568,
"step": 700
},
{
"epoch": 0.2026400285408491,
"grad_norm": 1.7131880521774292,
"learning_rate": 4.0525114155251145e-05,
"loss": 0.5105,
"step": 710
},
{
"epoch": 0.20549411344987514,
"grad_norm": 0.7919635772705078,
"learning_rate": 4.1095890410958905e-05,
"loss": 0.4417,
"step": 720
},
{
"epoch": 0.20834819835890117,
"grad_norm": 1.2708427906036377,
"learning_rate": 4.166666666666667e-05,
"loss": 0.454,
"step": 730
},
{
"epoch": 0.21120228326792723,
"grad_norm": 1.4845744371414185,
"learning_rate": 4.223744292237443e-05,
"loss": 0.4498,
"step": 740
},
{
"epoch": 0.21405636817695325,
"grad_norm": 1.4763469696044922,
"learning_rate": 4.280821917808219e-05,
"loss": 0.4724,
"step": 750
},
{
"epoch": 0.2169104530859793,
"grad_norm": 1.6870049238204956,
"learning_rate": 4.337899543378995e-05,
"loss": 0.4885,
"step": 760
},
{
"epoch": 0.21976453799500534,
"grad_norm": 0.9296655058860779,
"learning_rate": 4.394977168949772e-05,
"loss": 0.4669,
"step": 770
},
{
"epoch": 0.2226186229040314,
"grad_norm": 0.876143753528595,
"learning_rate": 4.452054794520548e-05,
"loss": 0.4569,
"step": 780
},
{
"epoch": 0.22547270781305745,
"grad_norm": 0.651347815990448,
"learning_rate": 4.5091324200913246e-05,
"loss": 0.4307,
"step": 790
},
{
"epoch": 0.22832679272208348,
"grad_norm": 0.7317978739738464,
"learning_rate": 4.5662100456621006e-05,
"loss": 0.4713,
"step": 800
},
{
"epoch": 0.23118087763110953,
"grad_norm": 1.1030404567718506,
"learning_rate": 4.623287671232877e-05,
"loss": 0.4387,
"step": 810
},
{
"epoch": 0.23403496254013556,
"grad_norm": 1.3097269535064697,
"learning_rate": 4.680365296803653e-05,
"loss": 0.4327,
"step": 820
},
{
"epoch": 0.23688904744916162,
"grad_norm": 1.6860710382461548,
"learning_rate": 4.737442922374429e-05,
"loss": 0.4658,
"step": 830
},
{
"epoch": 0.23974313235818764,
"grad_norm": 0.9696588516235352,
"learning_rate": 4.794520547945205e-05,
"loss": 0.485,
"step": 840
},
{
"epoch": 0.2425972172672137,
"grad_norm": 1.1960479021072388,
"learning_rate": 4.851598173515982e-05,
"loss": 0.454,
"step": 850
},
{
"epoch": 0.24545130217623976,
"grad_norm": 1.4926533699035645,
"learning_rate": 4.908675799086758e-05,
"loss": 0.5058,
"step": 860
},
{
"epoch": 0.24830538708526578,
"grad_norm": 0.5784097909927368,
"learning_rate": 4.9657534246575346e-05,
"loss": 0.4072,
"step": 870
},
{
"epoch": 0.25115947199429184,
"grad_norm": 1.0179357528686523,
"learning_rate": 5.0228310502283106e-05,
"loss": 0.4462,
"step": 880
},
{
"epoch": 0.2540135569033179,
"grad_norm": 0.9068560600280762,
"learning_rate": 5.0799086757990866e-05,
"loss": 0.4674,
"step": 890
},
{
"epoch": 0.2568676418123439,
"grad_norm": 1.4015111923217773,
"learning_rate": 5.136986301369864e-05,
"loss": 0.4528,
"step": 900
},
{
"epoch": 0.25972172672136995,
"grad_norm": 0.6935715675354004,
"learning_rate": 5.19406392694064e-05,
"loss": 0.4841,
"step": 910
},
{
"epoch": 0.262575811630396,
"grad_norm": 1.1978791952133179,
"learning_rate": 5.251141552511416e-05,
"loss": 0.4707,
"step": 920
},
{
"epoch": 0.26542989653942206,
"grad_norm": 0.9195595979690552,
"learning_rate": 5.308219178082192e-05,
"loss": 0.4538,
"step": 930
},
{
"epoch": 0.2682839814484481,
"grad_norm": 1.5978795289993286,
"learning_rate": 5.365296803652968e-05,
"loss": 0.4536,
"step": 940
},
{
"epoch": 0.2711380663574741,
"grad_norm": 1.3926838636398315,
"learning_rate": 5.422374429223745e-05,
"loss": 0.49,
"step": 950
},
{
"epoch": 0.2739921512665002,
"grad_norm": 0.575034499168396,
"learning_rate": 5.479452054794521e-05,
"loss": 0.5136,
"step": 960
},
{
"epoch": 0.27684623617552623,
"grad_norm": 0.6068254709243774,
"learning_rate": 5.536529680365297e-05,
"loss": 0.4552,
"step": 970
},
{
"epoch": 0.2797003210845523,
"grad_norm": 0.7864544987678528,
"learning_rate": 5.593607305936074e-05,
"loss": 0.492,
"step": 980
},
{
"epoch": 0.2825544059935783,
"grad_norm": 1.3433411121368408,
"learning_rate": 5.65068493150685e-05,
"loss": 0.4795,
"step": 990
},
{
"epoch": 0.28540849090260434,
"grad_norm": 0.7735553979873657,
"learning_rate": 5.707762557077626e-05,
"loss": 0.4538,
"step": 1000
},
{
"epoch": 0.2882625758116304,
"grad_norm": 0.6869261264801025,
"learning_rate": 5.764840182648402e-05,
"loss": 0.4821,
"step": 1010
},
{
"epoch": 0.29111666072065645,
"grad_norm": 1.081992506980896,
"learning_rate": 5.821917808219178e-05,
"loss": 0.4669,
"step": 1020
},
{
"epoch": 0.2939707456296825,
"grad_norm": 0.8736602663993835,
"learning_rate": 5.878995433789955e-05,
"loss": 0.4253,
"step": 1030
},
{
"epoch": 0.2968248305387085,
"grad_norm": 1.1186368465423584,
"learning_rate": 5.936073059360731e-05,
"loss": 0.469,
"step": 1040
},
{
"epoch": 0.29967891544773456,
"grad_norm": 0.6147318482398987,
"learning_rate": 5.993150684931507e-05,
"loss": 0.4573,
"step": 1050
},
{
"epoch": 0.3025330003567606,
"grad_norm": 1.0282665491104126,
"learning_rate": 6.050228310502284e-05,
"loss": 0.4448,
"step": 1060
},
{
"epoch": 0.3053870852657867,
"grad_norm": 1.3858377933502197,
"learning_rate": 6.10730593607306e-05,
"loss": 0.4453,
"step": 1070
},
{
"epoch": 0.3082411701748127,
"grad_norm": 0.9879347085952759,
"learning_rate": 6.164383561643835e-05,
"loss": 0.4441,
"step": 1080
},
{
"epoch": 0.31109525508383873,
"grad_norm": 0.7240331172943115,
"learning_rate": 6.221461187214613e-05,
"loss": 0.4408,
"step": 1090
},
{
"epoch": 0.3139493399928648,
"grad_norm": 0.7207432389259338,
"learning_rate": 6.278538812785389e-05,
"loss": 0.4753,
"step": 1100
},
{
"epoch": 0.31680342490189084,
"grad_norm": 1.230485439300537,
"learning_rate": 6.335616438356165e-05,
"loss": 0.4815,
"step": 1110
},
{
"epoch": 0.3196575098109169,
"grad_norm": 0.7309743762016296,
"learning_rate": 6.392694063926941e-05,
"loss": 0.424,
"step": 1120
},
{
"epoch": 0.3225115947199429,
"grad_norm": 0.919762134552002,
"learning_rate": 6.449771689497717e-05,
"loss": 0.4576,
"step": 1130
},
{
"epoch": 0.32536567962896895,
"grad_norm": 0.6879755258560181,
"learning_rate": 6.506849315068494e-05,
"loss": 0.4349,
"step": 1140
},
{
"epoch": 0.328219764537995,
"grad_norm": 0.8196412920951843,
"learning_rate": 6.56392694063927e-05,
"loss": 0.4955,
"step": 1150
},
{
"epoch": 0.33107384944702106,
"grad_norm": 1.016493558883667,
"learning_rate": 6.621004566210046e-05,
"loss": 0.4656,
"step": 1160
},
{
"epoch": 0.3339279343560471,
"grad_norm": 0.560720682144165,
"learning_rate": 6.678082191780822e-05,
"loss": 0.4251,
"step": 1170
},
{
"epoch": 0.3367820192650731,
"grad_norm": 0.5806992053985596,
"learning_rate": 6.735159817351598e-05,
"loss": 0.4293,
"step": 1180
},
{
"epoch": 0.3396361041740992,
"grad_norm": 0.9094499349594116,
"learning_rate": 6.792237442922374e-05,
"loss": 0.4143,
"step": 1190
},
{
"epoch": 0.34249018908312523,
"grad_norm": 0.5154547691345215,
"learning_rate": 6.84931506849315e-05,
"loss": 0.4682,
"step": 1200
},
{
"epoch": 0.3453442739921513,
"grad_norm": 0.7153740525245667,
"learning_rate": 6.906392694063926e-05,
"loss": 0.4717,
"step": 1210
},
{
"epoch": 0.3481983589011773,
"grad_norm": 0.6453626155853271,
"learning_rate": 6.963470319634704e-05,
"loss": 0.4609,
"step": 1220
},
{
"epoch": 0.35105244381020334,
"grad_norm": 0.6944445371627808,
"learning_rate": 7.02054794520548e-05,
"loss": 0.4446,
"step": 1230
},
{
"epoch": 0.3539065287192294,
"grad_norm": 0.5920615196228027,
"learning_rate": 7.077625570776256e-05,
"loss": 0.4863,
"step": 1240
},
{
"epoch": 0.35676061362825545,
"grad_norm": 0.8108448386192322,
"learning_rate": 7.134703196347033e-05,
"loss": 0.4713,
"step": 1250
},
{
"epoch": 0.3596146985372815,
"grad_norm": 0.6366176605224609,
"learning_rate": 7.191780821917809e-05,
"loss": 0.4338,
"step": 1260
},
{
"epoch": 0.3624687834463075,
"grad_norm": 0.9244728088378906,
"learning_rate": 7.248858447488585e-05,
"loss": 0.4319,
"step": 1270
},
{
"epoch": 0.36532286835533356,
"grad_norm": 0.7847846150398254,
"learning_rate": 7.305936073059361e-05,
"loss": 0.4692,
"step": 1280
},
{
"epoch": 0.3681769532643596,
"grad_norm": 0.9149333238601685,
"learning_rate": 7.363013698630137e-05,
"loss": 0.4677,
"step": 1290
},
{
"epoch": 0.3710310381733857,
"grad_norm": 0.7170394659042358,
"learning_rate": 7.420091324200914e-05,
"loss": 0.4261,
"step": 1300
},
{
"epoch": 0.3738851230824117,
"grad_norm": 1.0459473133087158,
"learning_rate": 7.47716894977169e-05,
"loss": 0.4709,
"step": 1310
},
{
"epoch": 0.37673920799143773,
"grad_norm": 1.1265268325805664,
"learning_rate": 7.534246575342466e-05,
"loss": 0.4799,
"step": 1320
},
{
"epoch": 0.3795932929004638,
"grad_norm": 0.7591957449913025,
"learning_rate": 7.591324200913242e-05,
"loss": 0.4438,
"step": 1330
},
{
"epoch": 0.38244737780948984,
"grad_norm": 0.5419506430625916,
"learning_rate": 7.648401826484018e-05,
"loss": 0.4578,
"step": 1340
},
{
"epoch": 0.3853014627185159,
"grad_norm": 0.5713040828704834,
"learning_rate": 7.705479452054794e-05,
"loss": 0.4633,
"step": 1350
},
{
"epoch": 0.3881555476275419,
"grad_norm": 0.9401557445526123,
"learning_rate": 7.76255707762557e-05,
"loss": 0.4517,
"step": 1360
},
{
"epoch": 0.39100963253656795,
"grad_norm": 0.9244952201843262,
"learning_rate": 7.819634703196348e-05,
"loss": 0.4429,
"step": 1370
},
{
"epoch": 0.393863717445594,
"grad_norm": 0.6746932864189148,
"learning_rate": 7.876712328767124e-05,
"loss": 0.4595,
"step": 1380
},
{
"epoch": 0.39671780235462006,
"grad_norm": 0.7216023206710815,
"learning_rate": 7.9337899543379e-05,
"loss": 0.4364,
"step": 1390
},
{
"epoch": 0.3995718872636461,
"grad_norm": 0.8057281374931335,
"learning_rate": 7.990867579908676e-05,
"loss": 0.4532,
"step": 1400
},
{
"epoch": 0.4024259721726721,
"grad_norm": 0.5679146647453308,
"learning_rate": 8.047945205479453e-05,
"loss": 0.42,
"step": 1410
},
{
"epoch": 0.4052800570816982,
"grad_norm": 0.45307865738868713,
"learning_rate": 8.105022831050229e-05,
"loss": 0.4454,
"step": 1420
},
{
"epoch": 0.40813414199072423,
"grad_norm": 0.559577226638794,
"learning_rate": 8.162100456621005e-05,
"loss": 0.4723,
"step": 1430
},
{
"epoch": 0.4109882268997503,
"grad_norm": 1.2319309711456299,
"learning_rate": 8.219178082191781e-05,
"loss": 0.4659,
"step": 1440
},
{
"epoch": 0.4138423118087763,
"grad_norm": 0.6527321338653564,
"learning_rate": 8.276255707762558e-05,
"loss": 0.4235,
"step": 1450
},
{
"epoch": 0.41669639671780234,
"grad_norm": 0.6203667521476746,
"learning_rate": 8.333333333333334e-05,
"loss": 0.4565,
"step": 1460
},
{
"epoch": 0.4195504816268284,
"grad_norm": 0.9035118818283081,
"learning_rate": 8.39041095890411e-05,
"loss": 0.4528,
"step": 1470
},
{
"epoch": 0.42240456653585445,
"grad_norm": 1.0064053535461426,
"learning_rate": 8.447488584474886e-05,
"loss": 0.4358,
"step": 1480
},
{
"epoch": 0.4252586514448805,
"grad_norm": 0.7333278656005859,
"learning_rate": 8.504566210045662e-05,
"loss": 0.4523,
"step": 1490
},
{
"epoch": 0.4281127363539065,
"grad_norm": 0.7744036912918091,
"learning_rate": 8.561643835616438e-05,
"loss": 0.4208,
"step": 1500
},
{
"epoch": 0.43096682126293256,
"grad_norm": 1.0139415264129639,
"learning_rate": 8.618721461187214e-05,
"loss": 0.4566,
"step": 1510
},
{
"epoch": 0.4338209061719586,
"grad_norm": 0.83629310131073,
"learning_rate": 8.67579908675799e-05,
"loss": 0.4434,
"step": 1520
},
{
"epoch": 0.4366749910809847,
"grad_norm": 0.7974062561988831,
"learning_rate": 8.732876712328768e-05,
"loss": 0.4562,
"step": 1530
},
{
"epoch": 0.4395290759900107,
"grad_norm": 0.6136748194694519,
"learning_rate": 8.789954337899544e-05,
"loss": 0.4556,
"step": 1540
},
{
"epoch": 0.44238316089903673,
"grad_norm": 0.6411725282669067,
"learning_rate": 8.84703196347032e-05,
"loss": 0.4601,
"step": 1550
},
{
"epoch": 0.4452372458080628,
"grad_norm": 0.9157238006591797,
"learning_rate": 8.904109589041096e-05,
"loss": 0.4473,
"step": 1560
},
{
"epoch": 0.44809133071708884,
"grad_norm": 0.5935428142547607,
"learning_rate": 8.961187214611873e-05,
"loss": 0.4374,
"step": 1570
},
{
"epoch": 0.4509454156261149,
"grad_norm": 0.6518482565879822,
"learning_rate": 9.018264840182649e-05,
"loss": 0.4898,
"step": 1580
},
{
"epoch": 0.4537995005351409,
"grad_norm": 0.990638792514801,
"learning_rate": 9.075342465753425e-05,
"loss": 0.4623,
"step": 1590
},
{
"epoch": 0.45665358544416695,
"grad_norm": 0.7768042683601379,
"learning_rate": 9.132420091324201e-05,
"loss": 0.4203,
"step": 1600
},
{
"epoch": 0.459507670353193,
"grad_norm": 0.49145108461380005,
"learning_rate": 9.189497716894979e-05,
"loss": 0.4454,
"step": 1610
},
{
"epoch": 0.46236175526221907,
"grad_norm": 0.7254750728607178,
"learning_rate": 9.246575342465755e-05,
"loss": 0.4204,
"step": 1620
},
{
"epoch": 0.4652158401712451,
"grad_norm": 0.5650269985198975,
"learning_rate": 9.30365296803653e-05,
"loss": 0.4431,
"step": 1630
},
{
"epoch": 0.4680699250802711,
"grad_norm": 0.9648821353912354,
"learning_rate": 9.360730593607307e-05,
"loss": 0.4389,
"step": 1640
},
{
"epoch": 0.4709240099892972,
"grad_norm": 0.6625390648841858,
"learning_rate": 9.417808219178083e-05,
"loss": 0.4491,
"step": 1650
},
{
"epoch": 0.47377809489832323,
"grad_norm": 0.8872827291488647,
"learning_rate": 9.474885844748859e-05,
"loss": 0.443,
"step": 1660
},
{
"epoch": 0.4766321798073493,
"grad_norm": 0.6523913741111755,
"learning_rate": 9.531963470319635e-05,
"loss": 0.421,
"step": 1670
},
{
"epoch": 0.4794862647163753,
"grad_norm": 0.48721668124198914,
"learning_rate": 9.58904109589041e-05,
"loss": 0.4762,
"step": 1680
},
{
"epoch": 0.48234034962540134,
"grad_norm": 0.808788537979126,
"learning_rate": 9.646118721461188e-05,
"loss": 0.4383,
"step": 1690
},
{
"epoch": 0.4851944345344274,
"grad_norm": 1.0816537141799927,
"learning_rate": 9.703196347031964e-05,
"loss": 0.4273,
"step": 1700
},
{
"epoch": 0.48804851944345345,
"grad_norm": 0.9300740361213684,
"learning_rate": 9.76027397260274e-05,
"loss": 0.4383,
"step": 1710
},
{
"epoch": 0.4909026043524795,
"grad_norm": 0.6394598484039307,
"learning_rate": 9.817351598173516e-05,
"loss": 0.4864,
"step": 1720
},
{
"epoch": 0.4937566892615055,
"grad_norm": 0.6434561014175415,
"learning_rate": 9.874429223744292e-05,
"loss": 0.4815,
"step": 1730
},
{
"epoch": 0.49661077417053157,
"grad_norm": 0.7413051128387451,
"learning_rate": 9.931506849315069e-05,
"loss": 0.465,
"step": 1740
},
{
"epoch": 0.4994648590795576,
"grad_norm": 0.6108921766281128,
"learning_rate": 9.988584474885845e-05,
"loss": 0.4645,
"step": 1750
},
{
"epoch": 0.5023189439885837,
"grad_norm": 0.6062872409820557,
"learning_rate": 9.999993644614703e-05,
"loss": 0.4244,
"step": 1760
},
{
"epoch": 0.5051730288976097,
"grad_norm": 0.7179297208786011,
"learning_rate": 9.999967825889622e-05,
"loss": 0.437,
"step": 1770
},
{
"epoch": 0.5080271138066358,
"grad_norm": 0.7407391667366028,
"learning_rate": 9.999922146715655e-05,
"loss": 0.4506,
"step": 1780
},
{
"epoch": 0.5108811987156618,
"grad_norm": 0.38226214051246643,
"learning_rate": 9.999856607274242e-05,
"loss": 0.3888,
"step": 1790
},
{
"epoch": 0.5137352836246878,
"grad_norm": 0.8683974146842957,
"learning_rate": 9.999771207825713e-05,
"loss": 0.445,
"step": 1800
},
{
"epoch": 0.5165893685337138,
"grad_norm": 0.6052406430244446,
"learning_rate": 9.999665948709287e-05,
"loss": 0.393,
"step": 1810
},
{
"epoch": 0.5194434534427399,
"grad_norm": 0.9383887052536011,
"learning_rate": 9.999540830343064e-05,
"loss": 0.4655,
"step": 1820
},
{
"epoch": 0.522297538351766,
"grad_norm": 0.6511524319648743,
"learning_rate": 9.999395853224028e-05,
"loss": 0.4633,
"step": 1830
},
{
"epoch": 0.525151623260792,
"grad_norm": 0.3971940875053406,
"learning_rate": 9.999231017928045e-05,
"loss": 0.408,
"step": 1840
},
{
"epoch": 0.5280057081698181,
"grad_norm": 0.6968293786048889,
"learning_rate": 9.99904632510986e-05,
"loss": 0.4559,
"step": 1850
},
{
"epoch": 0.5308597930788441,
"grad_norm": 0.47270509600639343,
"learning_rate": 9.998841775503096e-05,
"loss": 0.4257,
"step": 1860
},
{
"epoch": 0.5337138779878702,
"grad_norm": 0.7707135081291199,
"learning_rate": 9.998617369920244e-05,
"loss": 0.4326,
"step": 1870
},
{
"epoch": 0.5365679628968962,
"grad_norm": 0.720313549041748,
"learning_rate": 9.998373109252672e-05,
"loss": 0.4257,
"step": 1880
},
{
"epoch": 0.5394220478059222,
"grad_norm": 0.9472813606262207,
"learning_rate": 9.998108994470612e-05,
"loss": 0.4477,
"step": 1890
},
{
"epoch": 0.5422761327149482,
"grad_norm": 0.5608553290367126,
"learning_rate": 9.997825026623155e-05,
"loss": 0.4542,
"step": 1900
},
{
"epoch": 0.5451302176239743,
"grad_norm": 1.084322452545166,
"learning_rate": 9.997521206838255e-05,
"loss": 0.4208,
"step": 1910
},
{
"epoch": 0.5479843025330003,
"grad_norm": 0.6105670928955078,
"learning_rate": 9.997197536322722e-05,
"loss": 0.4282,
"step": 1920
},
{
"epoch": 0.5508383874420264,
"grad_norm": 0.5767210721969604,
"learning_rate": 9.99685401636221e-05,
"loss": 0.455,
"step": 1930
},
{
"epoch": 0.5536924723510525,
"grad_norm": 0.5981484055519104,
"learning_rate": 9.996490648321219e-05,
"loss": 0.4424,
"step": 1940
},
{
"epoch": 0.5565465572600785,
"grad_norm": 0.7510049939155579,
"learning_rate": 9.996107433643092e-05,
"loss": 0.4533,
"step": 1950
},
{
"epoch": 0.5594006421691046,
"grad_norm": 0.9091824889183044,
"learning_rate": 9.99570437385e-05,
"loss": 0.4232,
"step": 1960
},
{
"epoch": 0.5622547270781306,
"grad_norm": 0.5205058455467224,
"learning_rate": 9.995281470542943e-05,
"loss": 0.4501,
"step": 1970
},
{
"epoch": 0.5651088119871566,
"grad_norm": 0.5514882206916809,
"learning_rate": 9.994838725401743e-05,
"loss": 0.4258,
"step": 1980
},
{
"epoch": 0.5679628968961826,
"grad_norm": 0.6759376525878906,
"learning_rate": 9.994376140185032e-05,
"loss": 0.4764,
"step": 1990
},
{
"epoch": 0.5708169818052087,
"grad_norm": 0.5412331223487854,
"learning_rate": 9.993893716730255e-05,
"loss": 0.4606,
"step": 2000
},
{
"epoch": 0.5736710667142347,
"grad_norm": 0.7248188257217407,
"learning_rate": 9.993391456953651e-05,
"loss": 0.4417,
"step": 2010
},
{
"epoch": 0.5765251516232608,
"grad_norm": 0.6278032660484314,
"learning_rate": 9.992869362850256e-05,
"loss": 0.4354,
"step": 2020
},
{
"epoch": 0.5793792365322868,
"grad_norm": 0.7813991904258728,
"learning_rate": 9.992327436493886e-05,
"loss": 0.4386,
"step": 2030
},
{
"epoch": 0.5822333214413129,
"grad_norm": 0.6477668881416321,
"learning_rate": 9.991765680037137e-05,
"loss": 0.3989,
"step": 2040
},
{
"epoch": 0.585087406350339,
"grad_norm": 0.6790643930435181,
"learning_rate": 9.991184095711368e-05,
"loss": 0.4251,
"step": 2050
},
{
"epoch": 0.587941491259365,
"grad_norm": 0.6338703632354736,
"learning_rate": 9.990582685826701e-05,
"loss": 0.4251,
"step": 2060
},
{
"epoch": 0.590795576168391,
"grad_norm": 0.5298501253128052,
"learning_rate": 9.989961452772005e-05,
"loss": 0.4457,
"step": 2070
},
{
"epoch": 0.593649661077417,
"grad_norm": 0.547738790512085,
"learning_rate": 9.98932039901489e-05,
"loss": 0.447,
"step": 2080
},
{
"epoch": 0.5965037459864431,
"grad_norm": 0.6610301733016968,
"learning_rate": 9.988659527101693e-05,
"loss": 0.43,
"step": 2090
},
{
"epoch": 0.5993578308954691,
"grad_norm": 0.5846606492996216,
"learning_rate": 9.987978839657475e-05,
"loss": 0.4594,
"step": 2100
},
{
"epoch": 0.6022119158044952,
"grad_norm": 0.5017244815826416,
"learning_rate": 9.987278339386006e-05,
"loss": 0.4136,
"step": 2110
},
{
"epoch": 0.6050660007135212,
"grad_norm": 0.6239372491836548,
"learning_rate": 9.986558029069753e-05,
"loss": 0.4765,
"step": 2120
},
{
"epoch": 0.6079200856225473,
"grad_norm": 0.4270966053009033,
"learning_rate": 9.98581791156987e-05,
"loss": 0.421,
"step": 2130
},
{
"epoch": 0.6107741705315733,
"grad_norm": 0.9911853075027466,
"learning_rate": 9.985057989826193e-05,
"loss": 0.4398,
"step": 2140
},
{
"epoch": 0.6136282554405994,
"grad_norm": 0.5237308740615845,
"learning_rate": 9.984278266857215e-05,
"loss": 0.4036,
"step": 2150
},
{
"epoch": 0.6164823403496253,
"grad_norm": 0.7680717706680298,
"learning_rate": 9.983478745760088e-05,
"loss": 0.4579,
"step": 2160
},
{
"epoch": 0.6193364252586514,
"grad_norm": 0.6343058347702026,
"learning_rate": 9.982659429710601e-05,
"loss": 0.4119,
"step": 2170
},
{
"epoch": 0.6221905101676775,
"grad_norm": 0.4680488109588623,
"learning_rate": 9.981820321963171e-05,
"loss": 0.4047,
"step": 2180
},
{
"epoch": 0.6250445950767035,
"grad_norm": 0.4934472143650055,
"learning_rate": 9.980961425850832e-05,
"loss": 0.4377,
"step": 2190
},
{
"epoch": 0.6278986799857296,
"grad_norm": 0.6324202418327332,
"learning_rate": 9.980082744785219e-05,
"loss": 0.4417,
"step": 2200
},
{
"epoch": 0.6307527648947556,
"grad_norm": 0.5263383984565735,
"learning_rate": 9.979184282256553e-05,
"loss": 0.4166,
"step": 2210
},
{
"epoch": 0.6336068498037817,
"grad_norm": 1.1357102394104004,
"learning_rate": 9.978266041833631e-05,
"loss": 0.4536,
"step": 2220
},
{
"epoch": 0.6364609347128077,
"grad_norm": 1.1446198225021362,
"learning_rate": 9.977328027163811e-05,
"loss": 0.4449,
"step": 2230
},
{
"epoch": 0.6393150196218338,
"grad_norm": 0.4317645728588104,
"learning_rate": 9.976370241972995e-05,
"loss": 0.4161,
"step": 2240
},
{
"epoch": 0.6421691045308598,
"grad_norm": 0.6288971304893494,
"learning_rate": 9.975392690065616e-05,
"loss": 0.4469,
"step": 2250
},
{
"epoch": 0.6450231894398858,
"grad_norm": 0.5151458978652954,
"learning_rate": 9.974395375324622e-05,
"loss": 0.4478,
"step": 2260
},
{
"epoch": 0.6478772743489118,
"grad_norm": 0.6855816841125488,
"learning_rate": 9.973378301711465e-05,
"loss": 0.437,
"step": 2270
},
{
"epoch": 0.6507313592579379,
"grad_norm": 0.5615636110305786,
"learning_rate": 9.972341473266074e-05,
"loss": 0.4475,
"step": 2280
},
{
"epoch": 0.653585444166964,
"grad_norm": 0.758758544921875,
"learning_rate": 9.971284894106856e-05,
"loss": 0.4066,
"step": 2290
},
{
"epoch": 0.65643952907599,
"grad_norm": 0.5864000916481018,
"learning_rate": 9.970208568430662e-05,
"loss": 0.4327,
"step": 2300
},
{
"epoch": 0.6592936139850161,
"grad_norm": 0.5502752065658569,
"learning_rate": 9.969112500512784e-05,
"loss": 0.4266,
"step": 2310
},
{
"epoch": 0.6621476988940421,
"grad_norm": 0.9890068769454956,
"learning_rate": 9.96799669470693e-05,
"loss": 0.4279,
"step": 2320
},
{
"epoch": 0.6650017838030682,
"grad_norm": 0.6124778389930725,
"learning_rate": 9.966861155445207e-05,
"loss": 0.4138,
"step": 2330
},
{
"epoch": 0.6678558687120942,
"grad_norm": 0.6781124472618103,
"learning_rate": 9.965705887238109e-05,
"loss": 0.4264,
"step": 2340
},
{
"epoch": 0.6707099536211202,
"grad_norm": 0.5691377520561218,
"learning_rate": 9.964530894674495e-05,
"loss": 0.4407,
"step": 2350
},
{
"epoch": 0.6735640385301462,
"grad_norm": 0.5900623202323914,
"learning_rate": 9.963336182421572e-05,
"loss": 0.4134,
"step": 2360
},
{
"epoch": 0.6764181234391723,
"grad_norm": 0.9385659694671631,
"learning_rate": 9.962121755224874e-05,
"loss": 0.4488,
"step": 2370
},
{
"epoch": 0.6792722083481983,
"grad_norm": 0.5161449909210205,
"learning_rate": 9.960887617908245e-05,
"loss": 0.4443,
"step": 2380
},
{
"epoch": 0.6821262932572244,
"grad_norm": 0.636301577091217,
"learning_rate": 9.959633775373823e-05,
"loss": 0.4416,
"step": 2390
},
{
"epoch": 0.6849803781662505,
"grad_norm": 0.666099488735199,
"learning_rate": 9.958360232602013e-05,
"loss": 0.4629,
"step": 2400
},
{
"epoch": 0.6878344630752765,
"grad_norm": 0.8457536697387695,
"learning_rate": 9.957066994651474e-05,
"loss": 0.4516,
"step": 2410
},
{
"epoch": 0.6906885479843026,
"grad_norm": 0.753018319606781,
"learning_rate": 9.955754066659096e-05,
"loss": 0.4199,
"step": 2420
},
{
"epoch": 0.6935426328933286,
"grad_norm": 0.6624453663825989,
"learning_rate": 9.954421453839983e-05,
"loss": 0.4301,
"step": 2430
},
{
"epoch": 0.6963967178023546,
"grad_norm": 0.5142033696174622,
"learning_rate": 9.953069161487422e-05,
"loss": 0.445,
"step": 2440
},
{
"epoch": 0.6992508027113806,
"grad_norm": 0.6645984053611755,
"learning_rate": 9.95169719497288e-05,
"loss": 0.4784,
"step": 2450
},
{
"epoch": 0.7021048876204067,
"grad_norm": 0.767155647277832,
"learning_rate": 9.950305559745963e-05,
"loss": 0.4029,
"step": 2460
},
{
"epoch": 0.7049589725294327,
"grad_norm": 0.7413857579231262,
"learning_rate": 9.948894261334408e-05,
"loss": 0.4558,
"step": 2470
},
{
"epoch": 0.7078130574384588,
"grad_norm": 0.6691272854804993,
"learning_rate": 9.947463305344051e-05,
"loss": 0.4196,
"step": 2480
},
{
"epoch": 0.7106671423474848,
"grad_norm": 0.7997813820838928,
"learning_rate": 9.946012697458819e-05,
"loss": 0.4381,
"step": 2490
},
{
"epoch": 0.7135212272565109,
"grad_norm": 1.3109943866729736,
"learning_rate": 9.944542443440693e-05,
"loss": 0.4342,
"step": 2500
},
{
"epoch": 0.716375312165537,
"grad_norm": 0.6375836730003357,
"learning_rate": 9.94305254912969e-05,
"loss": 0.4314,
"step": 2510
},
{
"epoch": 0.719229397074563,
"grad_norm": 0.5794687271118164,
"learning_rate": 9.941543020443843e-05,
"loss": 0.444,
"step": 2520
},
{
"epoch": 0.722083481983589,
"grad_norm": 0.5072224736213684,
"learning_rate": 9.940013863379173e-05,
"loss": 0.4442,
"step": 2530
},
{
"epoch": 0.724937566892615,
"grad_norm": 0.6260116696357727,
"learning_rate": 9.93846508400967e-05,
"loss": 0.4761,
"step": 2540
},
{
"epoch": 0.7277916518016411,
"grad_norm": 0.5651645660400391,
"learning_rate": 9.936896688487262e-05,
"loss": 0.48,
"step": 2550
},
{
"epoch": 0.7306457367106671,
"grad_norm": 1.0688337087631226,
"learning_rate": 9.9353086830418e-05,
"loss": 0.4256,
"step": 2560
},
{
"epoch": 0.7334998216196932,
"grad_norm": 0.7255706191062927,
"learning_rate": 9.933701073981023e-05,
"loss": 0.4464,
"step": 2570
},
{
"epoch": 0.7363539065287192,
"grad_norm": 0.5594896078109741,
"learning_rate": 9.932073867690539e-05,
"loss": 0.4456,
"step": 2580
},
{
"epoch": 0.7392079914377453,
"grad_norm": 0.5755929946899414,
"learning_rate": 9.930427070633798e-05,
"loss": 0.4013,
"step": 2590
},
{
"epoch": 0.7420620763467713,
"grad_norm": 0.8663392066955566,
"learning_rate": 9.928760689352072e-05,
"loss": 0.4306,
"step": 2600
},
{
"epoch": 0.7449161612557974,
"grad_norm": 0.6546819806098938,
"learning_rate": 9.927074730464417e-05,
"loss": 0.4352,
"step": 2610
},
{
"epoch": 0.7477702461648233,
"grad_norm": 0.6611933708190918,
"learning_rate": 9.925369200667655e-05,
"loss": 0.4681,
"step": 2620
},
{
"epoch": 0.7506243310738494,
"grad_norm": 0.9629554152488708,
"learning_rate": 9.923644106736348e-05,
"loss": 0.4626,
"step": 2630
},
{
"epoch": 0.7534784159828755,
"grad_norm": 0.6556516289710999,
"learning_rate": 9.92189945552277e-05,
"loss": 0.4009,
"step": 2640
},
{
"epoch": 0.7563325008919015,
"grad_norm": 0.5907385945320129,
"learning_rate": 9.920135253956875e-05,
"loss": 0.4303,
"step": 2650
},
{
"epoch": 0.7591865858009276,
"grad_norm": 0.5351846218109131,
"learning_rate": 9.918351509046276e-05,
"loss": 0.4192,
"step": 2660
},
{
"epoch": 0.7620406707099536,
"grad_norm": 0.8660167455673218,
"learning_rate": 9.91654822787621e-05,
"loss": 0.4525,
"step": 2670
},
{
"epoch": 0.7648947556189797,
"grad_norm": 0.5204272270202637,
"learning_rate": 9.914725417609523e-05,
"loss": 0.3889,
"step": 2680
},
{
"epoch": 0.7677488405280057,
"grad_norm": 0.8953636884689331,
"learning_rate": 9.912883085486626e-05,
"loss": 0.4623,
"step": 2690
},
{
"epoch": 0.7706029254370318,
"grad_norm": 0.6904731392860413,
"learning_rate": 9.911021238825473e-05,
"loss": 0.4251,
"step": 2700
},
{
"epoch": 0.7734570103460578,
"grad_norm": 0.4080071449279785,
"learning_rate": 9.909139885021535e-05,
"loss": 0.4477,
"step": 2710
},
{
"epoch": 0.7763110952550838,
"grad_norm": 0.6172974705696106,
"learning_rate": 9.907239031547765e-05,
"loss": 0.4164,
"step": 2720
},
{
"epoch": 0.7791651801641098,
"grad_norm": 0.4886259436607361,
"learning_rate": 9.905318685954574e-05,
"loss": 0.421,
"step": 2730
},
{
"epoch": 0.7820192650731359,
"grad_norm": 1.2826932668685913,
"learning_rate": 9.903378855869797e-05,
"loss": 0.4093,
"step": 2740
},
{
"epoch": 0.784873349982162,
"grad_norm": 0.7179902195930481,
"learning_rate": 9.901419548998658e-05,
"loss": 0.4022,
"step": 2750
},
{
"epoch": 0.787727434891188,
"grad_norm": 0.559853732585907,
"learning_rate": 9.899440773123756e-05,
"loss": 0.4364,
"step": 2760
},
{
"epoch": 0.7905815198002141,
"grad_norm": 1.151147723197937,
"learning_rate": 9.897442536105013e-05,
"loss": 0.4179,
"step": 2770
},
{
"epoch": 0.7934356047092401,
"grad_norm": 0.7972052097320557,
"learning_rate": 9.895424845879657e-05,
"loss": 0.4534,
"step": 2780
},
{
"epoch": 0.7962896896182662,
"grad_norm": 1.4866856336593628,
"learning_rate": 9.893387710462189e-05,
"loss": 0.4501,
"step": 2790
},
{
"epoch": 0.7991437745272922,
"grad_norm": 0.6486208438873291,
"learning_rate": 9.891331137944344e-05,
"loss": 0.4327,
"step": 2800
},
{
"epoch": 0.8019978594363182,
"grad_norm": 0.4823531210422516,
"learning_rate": 9.889255136495063e-05,
"loss": 0.4188,
"step": 2810
},
{
"epoch": 0.8048519443453442,
"grad_norm": 0.4850338399410248,
"learning_rate": 9.887159714360469e-05,
"loss": 0.4096,
"step": 2820
},
{
"epoch": 0.8077060292543703,
"grad_norm": 0.6615185737609863,
"learning_rate": 9.885044879863816e-05,
"loss": 0.4626,
"step": 2830
},
{
"epoch": 0.8105601141633964,
"grad_norm": 0.639529824256897,
"learning_rate": 9.88291064140547e-05,
"loss": 0.4125,
"step": 2840
},
{
"epoch": 0.8134141990724224,
"grad_norm": 0.6544278860092163,
"learning_rate": 9.880757007462876e-05,
"loss": 0.4319,
"step": 2850
},
{
"epoch": 0.8162682839814485,
"grad_norm": 0.8010094165802002,
"learning_rate": 9.878583986590513e-05,
"loss": 0.4206,
"step": 2860
},
{
"epoch": 0.8191223688904745,
"grad_norm": 0.9454021453857422,
"learning_rate": 9.876391587419871e-05,
"loss": 0.4327,
"step": 2870
},
{
"epoch": 0.8219764537995006,
"grad_norm": 0.5547480583190918,
"learning_rate": 9.874179818659415e-05,
"loss": 0.448,
"step": 2880
},
{
"epoch": 0.8248305387085266,
"grad_norm": 0.526059091091156,
"learning_rate": 9.871948689094542e-05,
"loss": 0.413,
"step": 2890
},
{
"epoch": 0.8276846236175526,
"grad_norm": 0.5893587470054626,
"learning_rate": 9.869698207587558e-05,
"loss": 0.4044,
"step": 2900
},
{
"epoch": 0.8305387085265786,
"grad_norm": 0.5328799486160278,
"learning_rate": 9.867428383077637e-05,
"loss": 0.4255,
"step": 2910
},
{
"epoch": 0.8333927934356047,
"grad_norm": 0.4376719295978546,
"learning_rate": 9.865139224580779e-05,
"loss": 0.3751,
"step": 2920
},
{
"epoch": 0.8362468783446307,
"grad_norm": 0.9210516214370728,
"learning_rate": 9.86283074118979e-05,
"loss": 0.4657,
"step": 2930
},
{
"epoch": 0.8391009632536568,
"grad_norm": 0.5722871422767639,
"learning_rate": 9.86050294207423e-05,
"loss": 0.435,
"step": 2940
},
{
"epoch": 0.8419550481626829,
"grad_norm": 0.6614554524421692,
"learning_rate": 9.858155836480387e-05,
"loss": 0.4466,
"step": 2950
},
{
"epoch": 0.8448091330717089,
"grad_norm": 0.8073657751083374,
"learning_rate": 9.855789433731234e-05,
"loss": 0.405,
"step": 2960
},
{
"epoch": 0.847663217980735,
"grad_norm": 0.5362779498100281,
"learning_rate": 9.853403743226393e-05,
"loss": 0.4344,
"step": 2970
},
{
"epoch": 0.850517302889761,
"grad_norm": 0.6456807851791382,
"learning_rate": 9.850998774442106e-05,
"loss": 0.4305,
"step": 2980
},
{
"epoch": 0.853371387798787,
"grad_norm": 0.5409958958625793,
"learning_rate": 9.848574536931185e-05,
"loss": 0.4312,
"step": 2990
},
{
"epoch": 0.856225472707813,
"grad_norm": 0.5728853940963745,
"learning_rate": 9.846131040322983e-05,
"loss": 0.4364,
"step": 3000
},
{
"epoch": 0.8590795576168391,
"grad_norm": 0.5256248116493225,
"learning_rate": 9.843668294323348e-05,
"loss": 0.4201,
"step": 3010
},
{
"epoch": 0.8619336425258651,
"grad_norm": 0.8561432957649231,
"learning_rate": 9.841186308714589e-05,
"loss": 0.4336,
"step": 3020
},
{
"epoch": 0.8647877274348912,
"grad_norm": 0.7229063510894775,
"learning_rate": 9.838685093355444e-05,
"loss": 0.407,
"step": 3030
},
{
"epoch": 0.8676418123439172,
"grad_norm": 0.8342879414558411,
"learning_rate": 9.836164658181026e-05,
"loss": 0.4489,
"step": 3040
},
{
"epoch": 0.8704958972529433,
"grad_norm": 0.5657804608345032,
"learning_rate": 9.833625013202794e-05,
"loss": 0.4169,
"step": 3050
},
{
"epoch": 0.8733499821619694,
"grad_norm": 0.5953682065010071,
"learning_rate": 9.831066168508514e-05,
"loss": 0.408,
"step": 3060
},
{
"epoch": 0.8762040670709954,
"grad_norm": 0.456233948469162,
"learning_rate": 9.828488134262208e-05,
"loss": 0.4163,
"step": 3070
},
{
"epoch": 0.8790581519800214,
"grad_norm": 0.7670127749443054,
"learning_rate": 9.825890920704126e-05,
"loss": 0.4373,
"step": 3080
},
{
"epoch": 0.8819122368890474,
"grad_norm": 0.9011564254760742,
"learning_rate": 9.823274538150702e-05,
"loss": 0.3886,
"step": 3090
},
{
"epoch": 0.8847663217980735,
"grad_norm": 0.5500415563583374,
"learning_rate": 9.820638996994509e-05,
"loss": 0.4382,
"step": 3100
},
{
"epoch": 0.8876204067070995,
"grad_norm": 0.6603699922561646,
"learning_rate": 9.81798430770422e-05,
"loss": 0.4768,
"step": 3110
},
{
"epoch": 0.8904744916161256,
"grad_norm": 0.43730512261390686,
"learning_rate": 9.815310480824564e-05,
"loss": 0.3954,
"step": 3120
},
{
"epoch": 0.8933285765251516,
"grad_norm": 0.5685926675796509,
"learning_rate": 9.812617526976295e-05,
"loss": 0.4326,
"step": 3130
},
{
"epoch": 0.8961826614341777,
"grad_norm": 0.4876101016998291,
"learning_rate": 9.809905456856135e-05,
"loss": 0.4197,
"step": 3140
},
{
"epoch": 0.8990367463432037,
"grad_norm": 0.6296002864837646,
"learning_rate": 9.807174281236735e-05,
"loss": 0.4574,
"step": 3150
},
{
"epoch": 0.9018908312522298,
"grad_norm": 0.5232740640640259,
"learning_rate": 9.804424010966644e-05,
"loss": 0.4251,
"step": 3160
},
{
"epoch": 0.9047449161612559,
"grad_norm": 0.42207297682762146,
"learning_rate": 9.801654656970252e-05,
"loss": 0.3989,
"step": 3170
},
{
"epoch": 0.9075990010702818,
"grad_norm": 0.6646615266799927,
"learning_rate": 9.798866230247752e-05,
"loss": 0.4171,
"step": 3180
},
{
"epoch": 0.9104530859793079,
"grad_norm": 0.6919196248054504,
"learning_rate": 9.796058741875095e-05,
"loss": 0.4811,
"step": 3190
},
{
"epoch": 0.9133071708883339,
"grad_norm": 0.5413965582847595,
"learning_rate": 9.79323220300395e-05,
"loss": 0.4665,
"step": 3200
},
{
"epoch": 0.91616125579736,
"grad_norm": 0.8154647946357727,
"learning_rate": 9.790386624861657e-05,
"loss": 0.4236,
"step": 3210
},
{
"epoch": 0.919015340706386,
"grad_norm": 0.48265111446380615,
"learning_rate": 9.787522018751179e-05,
"loss": 0.419,
"step": 3220
},
{
"epoch": 0.9218694256154121,
"grad_norm": 0.7374817728996277,
"learning_rate": 9.78463839605106e-05,
"loss": 0.4372,
"step": 3230
},
{
"epoch": 0.9247235105244381,
"grad_norm": 0.7971628904342651,
"learning_rate": 9.781735768215385e-05,
"loss": 0.4593,
"step": 3240
},
{
"epoch": 0.9275775954334642,
"grad_norm": 0.49169448018074036,
"learning_rate": 9.778814146773727e-05,
"loss": 0.4407,
"step": 3250
},
{
"epoch": 0.9304316803424902,
"grad_norm": 0.8247508406639099,
"learning_rate": 9.7758735433311e-05,
"loss": 0.3936,
"step": 3260
},
{
"epoch": 0.9332857652515162,
"grad_norm": 0.4885646402835846,
"learning_rate": 9.772913969567923e-05,
"loss": 0.4159,
"step": 3270
},
{
"epoch": 0.9361398501605422,
"grad_norm": 0.5817092657089233,
"learning_rate": 9.76993543723996e-05,
"loss": 0.4822,
"step": 3280
},
{
"epoch": 0.9389939350695683,
"grad_norm": 0.5657879114151001,
"learning_rate": 9.766937958178293e-05,
"loss": 0.4002,
"step": 3290
},
{
"epoch": 0.9418480199785944,
"grad_norm": 0.6319302916526794,
"learning_rate": 9.763921544289245e-05,
"loss": 0.4502,
"step": 3300
},
{
"epoch": 0.9447021048876204,
"grad_norm": 0.7825123071670532,
"learning_rate": 9.760886207554367e-05,
"loss": 0.426,
"step": 3310
},
{
"epoch": 0.9475561897966465,
"grad_norm": 0.6912452578544617,
"learning_rate": 9.757831960030364e-05,
"loss": 0.4202,
"step": 3320
},
{
"epoch": 0.9504102747056725,
"grad_norm": 0.6349189877510071,
"learning_rate": 9.754758813849059e-05,
"loss": 0.4484,
"step": 3330
},
{
"epoch": 0.9532643596146986,
"grad_norm": 0.5008338689804077,
"learning_rate": 9.751666781217343e-05,
"loss": 0.4398,
"step": 3340
},
{
"epoch": 0.9561184445237246,
"grad_norm": 0.8321357369422913,
"learning_rate": 9.748555874417128e-05,
"loss": 0.42,
"step": 3350
},
{
"epoch": 0.9589725294327506,
"grad_norm": 0.454465389251709,
"learning_rate": 9.745426105805293e-05,
"loss": 0.3906,
"step": 3360
},
{
"epoch": 0.9618266143417766,
"grad_norm": 0.48528704047203064,
"learning_rate": 9.74227748781364e-05,
"loss": 0.4359,
"step": 3370
},
{
"epoch": 0.9646806992508027,
"grad_norm": 0.47498252987861633,
"learning_rate": 9.739110032948844e-05,
"loss": 0.3875,
"step": 3380
},
{
"epoch": 0.9675347841598287,
"grad_norm": 0.5873503088951111,
"learning_rate": 9.7359237537924e-05,
"loss": 0.4297,
"step": 3390
},
{
"epoch": 0.9703888690688548,
"grad_norm": 0.47873497009277344,
"learning_rate": 9.732718663000577e-05,
"loss": 0.4172,
"step": 3400
},
{
"epoch": 0.9732429539778809,
"grad_norm": 0.5299973487854004,
"learning_rate": 9.729494773304364e-05,
"loss": 0.4078,
"step": 3410
},
{
"epoch": 0.9760970388869069,
"grad_norm": 0.5915161967277527,
"learning_rate": 9.726252097509423e-05,
"loss": 0.4236,
"step": 3420
},
{
"epoch": 0.978951123795933,
"grad_norm": 0.87859046459198,
"learning_rate": 9.722990648496038e-05,
"loss": 0.4555,
"step": 3430
},
{
"epoch": 0.981805208704959,
"grad_norm": 0.6019312739372253,
"learning_rate": 9.719710439219058e-05,
"loss": 0.4058,
"step": 3440
},
{
"epoch": 0.984659293613985,
"grad_norm": 0.8020815253257751,
"learning_rate": 9.716411482707857e-05,
"loss": 0.4319,
"step": 3450
},
{
"epoch": 0.987513378523011,
"grad_norm": 0.7268204689025879,
"learning_rate": 9.713093792066266e-05,
"loss": 0.4416,
"step": 3460
},
{
"epoch": 0.9903674634320371,
"grad_norm": 0.6523804664611816,
"learning_rate": 9.709757380472536e-05,
"loss": 0.4043,
"step": 3470
},
{
"epoch": 0.9932215483410631,
"grad_norm": 0.6375195980072021,
"learning_rate": 9.706402261179279e-05,
"loss": 0.3974,
"step": 3480
},
{
"epoch": 0.9960756332500892,
"grad_norm": 0.9598755836486816,
"learning_rate": 9.703028447513417e-05,
"loss": 0.4353,
"step": 3490
},
{
"epoch": 0.9989297181591152,
"grad_norm": 0.4769824147224426,
"learning_rate": 9.699635952876127e-05,
"loss": 0.4413,
"step": 3500
}
],
"logging_steps": 10,
"max_steps": 17515,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.8992550309223465e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}