{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 100000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.005,
"grad_norm": 0.5228317379951477,
"learning_rate": 3.326666666666667e-06,
"loss": 0.7898,
"step": 500
},
{
"epoch": 0.01,
"grad_norm": 1.0322492122650146,
"learning_rate": 6.660000000000001e-06,
"loss": 0.5367,
"step": 1000
},
{
"epoch": 0.015,
"grad_norm": 2.0602364540100098,
"learning_rate": 9.993333333333333e-06,
"loss": 0.4491,
"step": 1500
},
{
"epoch": 0.02,
"grad_norm": 0.8021494150161743,
"learning_rate": 1.3326666666666667e-05,
"loss": 0.3972,
"step": 2000
},
{
"epoch": 0.025,
"grad_norm": 2.3705506324768066,
"learning_rate": 1.666e-05,
"loss": 0.3854,
"step": 2500
},
{
"epoch": 0.03,
"grad_norm": 1.653393030166626,
"learning_rate": 1.9993333333333337e-05,
"loss": 0.3723,
"step": 3000
},
{
"epoch": 0.035,
"grad_norm": 2.5747907161712646,
"learning_rate": 1.9998694075951004e-05,
"loss": 0.3747,
"step": 3500
},
{
"epoch": 0.04,
"grad_norm": 2.956480026245117,
"learning_rate": 1.9994766172915367e-05,
"loss": 0.3435,
"step": 4000
},
{
"epoch": 0.045,
"grad_norm": 1.4719043970108032,
"learning_rate": 1.9988217315672814e-05,
"loss": 0.3456,
"step": 4500
},
{
"epoch": 0.05,
"grad_norm": 1.938084602355957,
"learning_rate": 1.997904922154766e-05,
"loss": 0.3284,
"step": 5000
},
{
"epoch": 0.055,
"grad_norm": 3.1742770671844482,
"learning_rate": 1.99672642947137e-05,
"loss": 0.3323,
"step": 5500
},
{
"epoch": 0.06,
"grad_norm": 3.088749647140503,
"learning_rate": 1.995286562556374e-05,
"loss": 0.333,
"step": 6000
},
{
"epoch": 0.065,
"grad_norm": 4.315283298492432,
"learning_rate": 1.993585698989922e-05,
"loss": 0.335,
"step": 6500
},
{
"epoch": 0.07,
"grad_norm": 1.9591349363327026,
"learning_rate": 1.9916242847940064e-05,
"loss": 0.3301,
"step": 7000
},
{
"epoch": 0.075,
"grad_norm": 2.167367458343506,
"learning_rate": 1.9894028343155063e-05,
"loss": 0.3331,
"step": 7500
},
{
"epoch": 0.08,
"grad_norm": 1.7126212120056152,
"learning_rate": 1.9869219300913088e-05,
"loss": 0.3305,
"step": 8000
},
{
"epoch": 0.085,
"grad_norm": 2.834191083908081,
"learning_rate": 1.9841822226955496e-05,
"loss": 0.3144,
"step": 8500
},
{
"epoch": 0.09,
"grad_norm": 1.9557807445526123,
"learning_rate": 1.9811844305690107e-05,
"loss": 0.3165,
"step": 9000
},
{
"epoch": 0.095,
"grad_norm": 2.5267953872680664,
"learning_rate": 1.977929339830722e-05,
"loss": 0.3174,
"step": 9500
},
{
"epoch": 0.1,
"grad_norm": 1.9610834121704102,
"learning_rate": 1.9744178040718144e-05,
"loss": 0.3093,
"step": 10000
},
{
"epoch": 0.105,
"grad_norm": 3.3462774753570557,
"learning_rate": 1.9706507441316814e-05,
"loss": 0.3191,
"step": 10500
},
{
"epoch": 0.11,
"grad_norm": 1.884909987449646,
"learning_rate": 1.9666291478565047e-05,
"loss": 0.3254,
"step": 11000
},
{
"epoch": 0.115,
"grad_norm": 2.515130043029785,
"learning_rate": 1.962354069840208e-05,
"loss": 0.3134,
"step": 11500
},
{
"epoch": 0.12,
"grad_norm": 3.2403035163879395,
"learning_rate": 1.9578266311479082e-05,
"loss": 0.3154,
"step": 12000
},
{
"epoch": 0.125,
"grad_norm": 2.7380077838897705,
"learning_rate": 1.9530480190219365e-05,
"loss": 0.3203,
"step": 12500
},
{
"epoch": 0.13,
"grad_norm": 3.1931233406066895,
"learning_rate": 1.948019486570503e-05,
"loss": 0.3058,
"step": 13000
},
{
"epoch": 0.135,
"grad_norm": 3.3102500438690186,
"learning_rate": 1.942742352439093e-05,
"loss": 0.3024,
"step": 13500
},
{
"epoch": 0.14,
"grad_norm": 3.0574374198913574,
"learning_rate": 1.937218000464673e-05,
"loss": 0.3115,
"step": 14000
},
{
"epoch": 0.145,
"grad_norm": 3.5112788677215576,
"learning_rate": 1.931447879312804e-05,
"loss": 0.312,
"step": 14500
},
{
"epoch": 0.15,
"grad_norm": 2.5837135314941406,
"learning_rate": 1.9254335020977542e-05,
"loss": 0.3095,
"step": 15000
},
{
"epoch": 0.155,
"grad_norm": 2.8150858879089355,
"learning_rate": 1.9191764459857124e-05,
"loss": 0.3038,
"step": 15500
},
{
"epoch": 0.16,
"grad_norm": 2.605210304260254,
"learning_rate": 1.9126783517812005e-05,
"loss": 0.2889,
"step": 16000
},
{
"epoch": 0.165,
"grad_norm": 2.668278217315674,
"learning_rate": 1.9059409234968034e-05,
"loss": 0.2966,
"step": 16500
},
{
"epoch": 0.17,
"grad_norm": 1.3171859979629517,
"learning_rate": 1.8989659279063214e-05,
"loss": 0.2891,
"step": 17000
},
{
"epoch": 0.175,
"grad_norm": 3.820676565170288,
"learning_rate": 1.8917551940814615e-05,
"loss": 0.2953,
"step": 17500
},
{
"epoch": 0.18,
"grad_norm": 2.1207947731018066,
"learning_rate": 1.8843106129121993e-05,
"loss": 0.2982,
"step": 18000
},
{
"epoch": 0.185,
"grad_norm": 19.94713592529297,
"learning_rate": 1.8766341366109214e-05,
"loss": 0.2908,
"step": 18500
},
{
"epoch": 0.19,
"grad_norm": 2.073420286178589,
"learning_rate": 1.8687277782004954e-05,
"loss": 0.2788,
"step": 19000
},
{
"epoch": 0.195,
"grad_norm": 3.1489017009735107,
"learning_rate": 1.8605936109863872e-05,
"loss": 0.299,
"step": 19500
},
{
"epoch": 0.2,
"grad_norm": 2.1240925788879395,
"learning_rate": 1.8522337680129728e-05,
"loss": 0.2905,
"step": 20000
},
{
"epoch": 0.205,
"grad_norm": 2.2307348251342773,
"learning_rate": 1.843650441504186e-05,
"loss": 0.2928,
"step": 20500
},
{
"epoch": 0.21,
"grad_norm": 1.1732121706008911,
"learning_rate": 1.8348458822886437e-05,
"loss": 0.2893,
"step": 21000
},
{
"epoch": 0.215,
"grad_norm": 2.374523639678955,
"learning_rate": 1.8258223992094055e-05,
"loss": 0.2995,
"step": 21500
},
{
"epoch": 0.22,
"grad_norm": 1.002447247505188,
"learning_rate": 1.816582358518521e-05,
"loss": 0.2884,
"step": 22000
},
{
"epoch": 0.225,
"grad_norm": 2.586608409881592,
"learning_rate": 1.80712818325652e-05,
"loss": 0.2908,
"step": 22500
},
{
"epoch": 0.23,
"grad_norm": 2.567784070968628,
"learning_rate": 1.7974623526170113e-05,
"loss": 0.2955,
"step": 23000
},
{
"epoch": 0.235,
"grad_norm": 1.543729305267334,
"learning_rate": 1.78758740129656e-05,
"loss": 0.2848,
"step": 23500
},
{
"epoch": 0.24,
"grad_norm": 4.3870954513549805,
"learning_rate": 1.777505918830004e-05,
"loss": 0.2856,
"step": 24000
},
{
"epoch": 0.245,
"grad_norm": 2.431323528289795,
"learning_rate": 1.767220548911395e-05,
"loss": 0.2699,
"step": 24500
},
{
"epoch": 0.25,
"grad_norm": 2.8813323974609375,
"learning_rate": 1.756733988700737e-05,
"loss": 0.275,
"step": 25000
},
{
"epoch": 0.255,
"grad_norm": 2.1704187393188477,
"learning_rate": 1.7460489881166986e-05,
"loss": 0.2852,
"step": 25500
},
{
"epoch": 0.26,
"grad_norm": 1.9194124937057495,
"learning_rate": 1.7351683491154984e-05,
"loss": 0.2963,
"step": 26000
},
{
"epoch": 0.265,
"grad_norm": 2.9418258666992188,
"learning_rate": 1.7240949249561402e-05,
"loss": 0.282,
"step": 26500
},
{
"epoch": 0.27,
"grad_norm": 2.2254602909088135,
"learning_rate": 1.7128316194521923e-05,
"loss": 0.269,
"step": 27000
},
{
"epoch": 0.275,
"grad_norm": 3.698273181915283,
"learning_rate": 1.7013813862103163e-05,
"loss": 0.2748,
"step": 27500
},
{
"epoch": 0.28,
"grad_norm": 1.1656277179718018,
"learning_rate": 1.689747227855733e-05,
"loss": 0.2798,
"step": 28000
},
{
"epoch": 0.285,
"grad_norm": 2.6570346355438232,
"learning_rate": 1.677932195244836e-05,
"loss": 0.2791,
"step": 28500
},
{
"epoch": 0.29,
"grad_norm": 3.1429874897003174,
"learning_rate": 1.6659393866651585e-05,
"loss": 0.2695,
"step": 29000
},
{
"epoch": 0.295,
"grad_norm": 3.390726089477539,
"learning_rate": 1.6537719470228993e-05,
"loss": 0.2749,
"step": 29500
},
{
"epoch": 0.3,
"grad_norm": 2.2655887603759766,
"learning_rate": 1.6414330670182278e-05,
"loss": 0.2764,
"step": 30000
},
{
"epoch": 0.305,
"grad_norm": 2.555089235305786,
"learning_rate": 1.6289259823085765e-05,
"loss": 0.2834,
"step": 30500
},
{
"epoch": 0.31,
"grad_norm": 2.014594793319702,
"learning_rate": 1.6162539726601477e-05,
"loss": 0.2801,
"step": 31000
},
{
"epoch": 0.315,
"grad_norm": 3.4530141353607178,
"learning_rate": 1.603420361087849e-05,
"loss": 0.2789,
"step": 31500
},
{
"epoch": 0.32,
"grad_norm": 1.4499799013137817,
"learning_rate": 1.5904285129838924e-05,
"loss": 0.2799,
"step": 32000
},
{
"epoch": 0.325,
"grad_norm": 2.807821035385132,
"learning_rate": 1.577281835235279e-05,
"loss": 0.2717,
"step": 32500
},
{
"epoch": 0.33,
"grad_norm": 2.2752676010131836,
"learning_rate": 1.563983775330399e-05,
"loss": 0.2694,
"step": 33000
},
{
"epoch": 0.335,
"grad_norm": 1.8416115045547485,
"learning_rate": 1.5505378204549907e-05,
"loss": 0.2773,
"step": 33500
},
{
"epoch": 0.34,
"grad_norm": 5.743653297424316,
"learning_rate": 1.536947496577684e-05,
"loss": 0.2786,
"step": 34000
},
{
"epoch": 0.345,
"grad_norm": 2.5975189208984375,
"learning_rate": 1.5232163675253793e-05,
"loss": 0.2825,
"step": 34500
},
{
"epoch": 0.35,
"grad_norm": 1.8504586219787598,
"learning_rate": 1.5093480340486922e-05,
"loss": 0.2693,
"step": 35000
},
{
"epoch": 0.355,
"grad_norm": 2.5584330558776855,
"learning_rate": 1.495346132877721e-05,
"loss": 0.2688,
"step": 35500
},
{
"epoch": 0.36,
"grad_norm": 3.1796674728393555,
"learning_rate": 1.4812143357683773e-05,
"loss": 0.2552,
"step": 36000
},
{
"epoch": 0.365,
"grad_norm": 2.1949145793914795,
"learning_rate": 1.4669563485395315e-05,
"loss": 0.2735,
"step": 36500
},
{
"epoch": 0.37,
"grad_norm": 2.1829614639282227,
"learning_rate": 1.4525759101012249e-05,
"loss": 0.2778,
"step": 37000
},
{
"epoch": 0.375,
"grad_norm": 2.5891077518463135,
"learning_rate": 1.4380767914742074e-05,
"loss": 0.2703,
"step": 37500
},
{
"epoch": 0.38,
"grad_norm": 1.912104606628418,
"learning_rate": 1.4234627948010497e-05,
"loss": 0.2603,
"step": 38000
},
{
"epoch": 0.385,
"grad_norm": 2.5725393295288086,
"learning_rate": 1.4087377523491016e-05,
"loss": 0.2745,
"step": 38500
},
{
"epoch": 0.39,
"grad_norm": 2.139127016067505,
"learning_rate": 1.3939055255055419e-05,
"loss": 0.2592,
"step": 39000
},
{
"epoch": 0.395,
"grad_norm": 3.271855115890503,
"learning_rate": 1.3789700037647988e-05,
"loss": 0.2676,
"step": 39500
},
{
"epoch": 0.4,
"grad_norm": 4.065393924713135,
"learning_rate": 1.363935103708598e-05,
"loss": 0.2582,
"step": 40000
},
{
"epoch": 0.405,
"grad_norm": 4.319698333740234,
"learning_rate": 1.3488047679789052e-05,
"loss": 0.2738,
"step": 40500
},
{
"epoch": 0.41,
"grad_norm": 1.9484474658966064,
"learning_rate": 1.3335829642440372e-05,
"loss": 0.2625,
"step": 41000
},
{
"epoch": 0.415,
"grad_norm": 2.7909133434295654,
"learning_rate": 1.3182736841582109e-05,
"loss": 0.2732,
"step": 41500
},
{
"epoch": 0.42,
"grad_norm": 1.9949915409088135,
"learning_rate": 1.3028809423147997e-05,
"loss": 0.2789,
"step": 42000
},
{
"epoch": 0.425,
"grad_norm": 2.6040537357330322,
"learning_rate": 1.2874087751935756e-05,
"loss": 0.27,
"step": 42500
},
{
"epoch": 0.43,
"grad_norm": 2.1791725158691406,
"learning_rate": 1.2718612401022148e-05,
"loss": 0.2682,
"step": 43000
},
{
"epoch": 0.435,
"grad_norm": 2.334940195083618,
"learning_rate": 1.2562424141123364e-05,
"loss": 0.2702,
"step": 43500
},
{
"epoch": 0.44,
"grad_norm": 3.011237144470215,
"learning_rate": 1.2405563929903614e-05,
"loss": 0.2684,
"step": 44000
},
{
"epoch": 0.445,
"grad_norm": 3.324738025665283,
"learning_rate": 1.2248072901234715e-05,
"loss": 0.2626,
"step": 44500
},
{
"epoch": 0.45,
"grad_norm": 2.0907318592071533,
"learning_rate": 1.2089992354409415e-05,
"loss": 0.2673,
"step": 45000
},
{
"epoch": 0.455,
"grad_norm": 2.5054478645324707,
"learning_rate": 1.1931363743311377e-05,
"loss": 0.2634,
"step": 45500
},
{
"epoch": 0.46,
"grad_norm": 3.015181541442871,
"learning_rate": 1.1772228665544634e-05,
"loss": 0.2623,
"step": 46000
},
{
"epoch": 0.465,
"grad_norm": 1.9461313486099243,
"learning_rate": 1.1612628851525313e-05,
"loss": 0.2685,
"step": 46500
},
{
"epoch": 0.47,
"grad_norm": 2.5035080909729004,
"learning_rate": 1.1452606153538577e-05,
"loss": 0.2625,
"step": 47000
},
{
"epoch": 0.475,
"grad_norm": 1.4080424308776855,
"learning_rate": 1.129220253476358e-05,
"loss": 0.2651,
"step": 47500
},
{
"epoch": 0.48,
"grad_norm": 2.9700748920440674,
"learning_rate": 1.1131460058269342e-05,
"loss": 0.2547,
"step": 48000
},
{
"epoch": 0.485,
"grad_norm": 2.252192258834839,
"learning_rate": 1.0970420875984453e-05,
"loss": 0.2629,
"step": 48500
},
{
"epoch": 0.49,
"grad_norm": 3.6359364986419678,
"learning_rate": 1.0809127217643462e-05,
"loss": 0.2623,
"step": 49000
},
{
"epoch": 0.495,
"grad_norm": 2.2924039363861084,
"learning_rate": 1.0647621379712843e-05,
"loss": 0.2648,
"step": 49500
},
{
"epoch": 0.5,
"grad_norm": 1.8426686525344849,
"learning_rate": 1.0485945714299487e-05,
"loss": 0.2536,
"step": 50000
},
{
"epoch": 0.505,
"grad_norm": 2.85508131980896,
"learning_rate": 1.0324142618044612e-05,
"loss": 0.2685,
"step": 50500
},
{
"epoch": 0.51,
"grad_norm": 1.8583101034164429,
"learning_rate": 1.0162254521005956e-05,
"loss": 0.2627,
"step": 51000
},
{
"epoch": 0.515,
"grad_norm": 2.0837361812591553,
"learning_rate": 1.0000323875531241e-05,
"loss": 0.2594,
"step": 51500
},
{
"epoch": 0.52,
"grad_norm": 2.786468505859375,
"learning_rate": 9.838393145125787e-06,
"loss": 0.2602,
"step": 52000
},
{
"epoch": 0.525,
"grad_norm": 3.2767674922943115,
"learning_rate": 9.676504793317163e-06,
"loss": 0.2575,
"step": 52500
},
{
"epoch": 0.53,
"grad_norm": 2.087574005126953,
"learning_rate": 9.514701272519918e-06,
"loss": 0.2536,
"step": 53000
},
{
"epoch": 0.535,
"grad_norm": 1.6230825185775757,
"learning_rate": 9.353025012903112e-06,
"loss": 0.2634,
"step": 53500
},
{
"epoch": 0.54,
"grad_norm": 1.9455654621124268,
"learning_rate": 9.191518411263777e-06,
"loss": 0.2415,
"step": 54000
},
{
"epoch": 0.545,
"grad_norm": 2.6995887756347656,
"learning_rate": 9.03022381990908e-06,
"loss": 0.2687,
"step": 54500
},
{
"epoch": 0.55,
"grad_norm": 4.489888668060303,
"learning_rate": 8.869183535550167e-06,
"loss": 0.2719,
"step": 55000
},
{
"epoch": 0.555,
"grad_norm": 2.966480016708374,
"learning_rate": 8.708439788210573e-06,
"loss": 0.268,
"step": 55500
},
{
"epoch": 0.56,
"grad_norm": 3.2469143867492676,
"learning_rate": 8.548034730152153e-06,
"loss": 0.262,
"step": 56000
},
{
"epoch": 0.565,
"grad_norm": 2.2795863151550293,
"learning_rate": 8.388010424821354e-06,
"loss": 0.2644,
"step": 56500
},
{
"epoch": 0.57,
"grad_norm": 2.264314889907837,
"learning_rate": 8.22840883581883e-06,
"loss": 0.2519,
"step": 57000
},
{
"epoch": 0.575,
"grad_norm": 2.3506600856781006,
"learning_rate": 8.069271815895196e-06,
"loss": 0.2797,
"step": 57500
},
{
"epoch": 0.58,
"grad_norm": 4.5607428550720215,
"learning_rate": 7.910641095975886e-06,
"loss": 0.2574,
"step": 58000
},
{
"epoch": 0.585,
"grad_norm": 0.9732487201690674,
"learning_rate": 7.752558274217919e-06,
"loss": 0.2529,
"step": 58500
},
{
"epoch": 0.59,
"grad_norm": 3.291529417037964,
"learning_rate": 7.595064805101538e-06,
"loss": 0.2502,
"step": 59000
},
{
"epoch": 0.595,
"grad_norm": 1.8199131488800049,
"learning_rate": 7.438201988559449e-06,
"loss": 0.2643,
"step": 59500
},
{
"epoch": 0.6,
"grad_norm": 5.467274188995361,
"learning_rate": 7.282010959146662e-06,
"loss": 0.2636,
"step": 60000
},
{
"epoch": 0.605,
"grad_norm": 3.899115562438965,
"learning_rate": 7.1265326752536545e-06,
"loss": 0.2625,
"step": 60500
},
{
"epoch": 0.61,
"grad_norm": 2.5981805324554443,
"learning_rate": 6.971807908365737e-06,
"loss": 0.2577,
"step": 61000
},
{
"epoch": 0.615,
"grad_norm": 2.875891923904419,
"learning_rate": 6.817877232371453e-06,
"loss": 0.2649,
"step": 61500
},
{
"epoch": 0.62,
"grad_norm": 2.9105169773101807,
"learning_rate": 6.66478101292277e-06,
"loss": 0.2496,
"step": 62000
},
{
"epoch": 0.625,
"grad_norm": 3.293264389038086,
"learning_rate": 6.5125593968498855e-06,
"loss": 0.2444,
"step": 62500
},
{
"epoch": 0.63,
"grad_norm": 3.560056209564209,
"learning_rate": 6.361252301633422e-06,
"loss": 0.2625,
"step": 63000
},
{
"epoch": 0.635,
"grad_norm": 2.613802909851074,
"learning_rate": 6.210899404936761e-06,
"loss": 0.2526,
"step": 63500
},
{
"epoch": 0.64,
"grad_norm": 3.4607479572296143,
"learning_rate": 6.061540134201248e-06,
"loss": 0.2556,
"step": 64000
},
{
"epoch": 0.645,
"grad_norm": 3.917365074157715,
"learning_rate": 5.913213656307049e-06,
"loss": 0.2601,
"step": 64500
},
{
"epoch": 0.65,
"grad_norm": 2.4207093715667725,
"learning_rate": 5.765958867302311e-06,
"loss": 0.2512,
"step": 65000
},
{
"epoch": 0.655,
"grad_norm": 2.076265335083008,
"learning_rate": 5.619814382203341e-06,
"loss": 0.2437,
"step": 65500
},
{
"epoch": 0.66,
"grad_norm": 3.774073600769043,
"learning_rate": 5.474818524868482e-06,
"loss": 0.2726,
"step": 66000
},
{
"epoch": 0.665,
"grad_norm": 3.220529079437256,
"learning_rate": 5.331009317948359e-06,
"loss": 0.2664,
"step": 66500
},
{
"epoch": 0.67,
"grad_norm": 2.737941026687622,
"learning_rate": 5.188424472915091e-06,
"loss": 0.2703,
"step": 67000
},
{
"epoch": 0.675,
"grad_norm": 2.459578275680542,
"learning_rate": 5.047101380173111e-06,
"loss": 0.244,
"step": 67500
},
{
"epoch": 0.68,
"grad_norm": 3.3251523971557617,
"learning_rate": 4.907077099254194e-06,
"loss": 0.2597,
"step": 68000
},
{
"epoch": 0.685,
"grad_norm": 2.779625654220581,
"learning_rate": 4.7683883490991985e-06,
"loss": 0.2623,
"step": 68500
},
{
"epoch": 0.69,
"grad_norm": 1.0731335878372192,
"learning_rate": 4.6310714984292096e-06,
"loss": 0.2616,
"step": 69000
},
{
"epoch": 0.695,
"grad_norm": 1.1064262390136719,
"learning_rate": 4.495162556208438e-06,
"loss": 0.2489,
"step": 69500
},
{
"epoch": 0.7,
"grad_norm": 6.329731464385986,
"learning_rate": 4.360697162201508e-06,
"loss": 0.254,
"step": 70000
},
{
"epoch": 0.705,
"grad_norm": 3.1738226413726807,
"learning_rate": 4.2277105776275395e-06,
"loss": 0.249,
"step": 70500
},
{
"epoch": 0.71,
"grad_norm": 2.834291696548462,
"learning_rate": 4.096237675913521e-06,
"loss": 0.2607,
"step": 71000
},
{
"epoch": 0.715,
"grad_norm": 2.2489569187164307,
"learning_rate": 3.966312933549325e-06,
"loss": 0.24,
"step": 71500
},
{
"epoch": 0.72,
"grad_norm": 4.554741859436035,
"learning_rate": 3.837970421046881e-06,
"loss": 0.2492,
"step": 72000
},
{
"epoch": 0.725,
"grad_norm": 4.065186977386475,
"learning_rate": 3.7112437940057543e-06,
"loss": 0.2491,
"step": 72500
},
{
"epoch": 0.73,
"grad_norm": 3.260732650756836,
"learning_rate": 3.5861662842875534e-06,
"loss": 0.2413,
"step": 73000
},
{
"epoch": 0.735,
"grad_norm": 4.497494220733643,
"learning_rate": 3.4627706913014503e-06,
"loss": 0.2537,
"step": 73500
},
{
"epoch": 0.74,
"grad_norm": 2.5874199867248535,
"learning_rate": 3.3410893734031046e-06,
"loss": 0.2627,
"step": 74000
},
{
"epoch": 0.745,
"grad_norm": 2.394810438156128,
"learning_rate": 3.2211542394092443e-06,
"loss": 0.2516,
"step": 74500
},
{
"epoch": 0.75,
"grad_norm": 2.5389678478240967,
"learning_rate": 3.102996740230131e-06,
"loss": 0.2434,
"step": 75000
},
{
"epoch": 0.755,
"grad_norm": 1.8920937776565552,
"learning_rate": 2.986647860622105e-06,
"loss": 0.2602,
"step": 75500
},
{
"epoch": 0.76,
"grad_norm": 1.569104790687561,
"learning_rate": 2.8721381110623724e-06,
"loss": 0.248,
"step": 76000
},
{
"epoch": 0.765,
"grad_norm": 2.620171308517456,
"learning_rate": 2.7594975197481546e-06,
"loss": 0.249,
"step": 76500
},
{
"epoch": 0.77,
"grad_norm": 3.781393527984619,
"learning_rate": 2.64875562472232e-06,
"loss": 0.2475,
"step": 77000
},
{
"epoch": 0.775,
"grad_norm": 4.293469429016113,
"learning_rate": 2.5399414661275444e-06,
"loss": 0.2584,
"step": 77500
},
{
"epoch": 0.78,
"grad_norm": 1.6750516891479492,
"learning_rate": 2.433083578591029e-06,
"loss": 0.2471,
"step": 78000
},
{
"epoch": 0.785,
"grad_norm": 3.109180450439453,
"learning_rate": 2.3282099837418004e-06,
"loss": 0.257,
"step": 78500
},
{
"epoch": 0.79,
"grad_norm": 3.492893695831299,
"learning_rate": 2.2253481828625144e-06,
"loss": 0.2644,
"step": 79000
},
{
"epoch": 0.795,
"grad_norm": 3.8731396198272705,
"learning_rate": 2.1245251496777274e-06,
"loss": 0.2546,
"step": 79500
},
{
"epoch": 0.8,
"grad_norm": 2.1617014408111572,
"learning_rate": 2.025767323280502e-06,
"loss": 0.2542,
"step": 80000
},
{
"epoch": 0.805,
"grad_norm": 5.593191146850586,
"learning_rate": 1.929100601199211e-06,
"loss": 0.2376,
"step": 80500
},
{
"epoch": 0.81,
"grad_norm": 3.496340036392212,
"learning_rate": 1.834550332606365e-06,
"loss": 0.234,
"step": 81000
},
{
"epoch": 0.815,
"grad_norm": 2.7082982063293457,
"learning_rate": 1.742141311671227e-06,
"loss": 0.2482,
"step": 81500
},
{
"epoch": 0.82,
"grad_norm": 2.0201199054718018,
"learning_rate": 1.6518977710579708e-06,
"loss": 0.2676,
"step": 82000
},
{
"epoch": 0.825,
"grad_norm": 2.157087564468384,
"learning_rate": 1.5638433755710892e-06,
"loss": 0.2505,
"step": 82500
},
{
"epoch": 0.83,
"grad_norm": 2.467223882675171,
"learning_rate": 1.4780012159497037e-06,
"loss": 0.2632,
"step": 83000
},
{
"epoch": 0.835,
"grad_norm": 1.9714136123657227,
"learning_rate": 1.3943938028124216e-06,
"loss": 0.2568,
"step": 83500
},
{
"epoch": 0.84,
"grad_norm": 6.337538242340088,
"learning_rate": 1.3130430607543166e-06,
"loss": 0.2555,
"step": 84000
},
{
"epoch": 0.845,
"grad_norm": 5.382758617401123,
"learning_rate": 1.2339703225975818e-06,
"loss": 0.2508,
"step": 84500
},
{
"epoch": 0.85,
"grad_norm": 2.141634464263916,
"learning_rate": 1.1571963237973693e-06,
"loss": 0.246,
"step": 85000
},
{
"epoch": 0.855,
"grad_norm": 3.525200128555298,
"learning_rate": 1.0827411970042723e-06,
"loss": 0.2526,
"step": 85500
},
{
"epoch": 0.86,
"grad_norm": 4.05570650100708,
"learning_rate": 1.010624466784892e-06,
"loss": 0.2479,
"step": 86000
},
{
"epoch": 0.865,
"grad_norm": 4.631308555603027,
"learning_rate": 9.40865044501853e-07,
"loss": 0.2495,
"step": 86500
},
{
"epoch": 0.87,
"grad_norm": 2.8069100379943848,
"learning_rate": 8.734812233546319e-07,
"loss": 0.252,
"step": 87000
},
{
"epoch": 0.875,
"grad_norm": 3.1960978507995605,
"learning_rate": 8.084906735824771e-07,
"loss": 0.2531,
"step": 87500
},
{
"epoch": 0.88,
"grad_norm": 5.496489524841309,
"learning_rate": 7.45910437830697e-07,
"loss": 0.2495,
"step": 88000
},
{
"epoch": 0.885,
"grad_norm": 3.19916033744812,
"learning_rate": 6.857569266815233e-07,
"loss": 0.2462,
"step": 88500
},
{
"epoch": 0.89,
"grad_norm": 3.560880422592163,
"learning_rate": 6.2804591435072e-07,
"loss": 0.2611,
"step": 89000
},
{
"epoch": 0.895,
"grad_norm": 1.2571769952774048,
"learning_rate": 5.727925345510644e-07,
"loss": 0.2428,
"step": 89500
},
{
"epoch": 0.9,
"grad_norm": 1.8454118967056274,
"learning_rate": 5.200112765237985e-07,
"loss": 0.25,
"step": 90000
},
{
"epoch": 0.905,
"grad_norm": 2.3532028198242188,
"learning_rate": 4.697159812390739e-07,
"loss": 0.2442,
"step": 90500
},
{
"epoch": 0.91,
"grad_norm": 3.9813899993896484,
"learning_rate": 4.21919837766398e-07,
"loss": 0.2474,
"step": 91000
},
{
"epoch": 0.915,
"grad_norm": 2.554281711578369,
"learning_rate": 3.766353798160283e-07,
"loss": 0.2429,
"step": 91500
},
{
"epoch": 0.92,
"grad_norm": 2.701397657394409,
"learning_rate": 3.338744824522266e-07,
"loss": 0.2477,
"step": 92000
},
{
"epoch": 0.925,
"grad_norm": 2.0029845237731934,
"learning_rate": 2.9364835897922606e-07,
"loss": 0.2514,
"step": 92500
},
{
"epoch": 0.93,
"grad_norm": 5.601722240447998,
"learning_rate": 2.5596755800073593e-07,
"loss": 0.247,
"step": 93000
},
{
"epoch": 0.935,
"grad_norm": 2.904860019683838,
"learning_rate": 2.208419606537593e-07,
"loss": 0.2519,
"step": 93500
},
{
"epoch": 0.94,
"grad_norm": 2.9987542629241943,
"learning_rate": 1.8828077801743006e-07,
"loss": 0.2559,
"step": 94000
},
{
"epoch": 0.945,
"grad_norm": 3.036500930786133,
"learning_rate": 1.5829254869756395e-07,
"loss": 0.2558,
"step": 94500
},
{
"epoch": 0.95,
"grad_norm": 1.084037184715271,
"learning_rate": 1.3088513658756962e-07,
"loss": 0.2485,
"step": 95000
},
{
"epoch": 0.955,
"grad_norm": 1.9537458419799805,
"learning_rate": 1.060657288062672e-07,
"loss": 0.2495,
"step": 95500
},
{
"epoch": 0.96,
"grad_norm": 2.6217870712280273,
"learning_rate": 8.384083381319464e-08,
"loss": 0.2438,
"step": 96000
},
{
"epoch": 0.965,
"grad_norm": 4.048638343811035,
"learning_rate": 6.421627970187971e-08,
"loss": 0.2466,
"step": 96500
},
{
"epoch": 0.97,
"grad_norm": 2.7693753242492676,
"learning_rate": 4.719721267152033e-08,
"loss": 0.2578,
"step": 97000
},
{
"epoch": 0.975,
"grad_norm": 4.596212863922119,
"learning_rate": 3.2788095677480644e-08,
"loss": 0.2506,
"step": 97500
},
{
"epoch": 0.98,
"grad_norm": 2.3294010162353516,
"learning_rate": 2.099270726096503e-08,
"loss": 0.2518,
"step": 98000
},
{
"epoch": 0.985,
"grad_norm": 4.738352298736572,
"learning_rate": 1.1814140558158438e-08,
"loss": 0.2449,
"step": 98500
},
{
"epoch": 0.99,
"grad_norm": 3.600982904434204,
"learning_rate": 5.254802489100819e-09,
"loss": 0.2528,
"step": 99000
},
{
"epoch": 0.995,
"grad_norm": 1.760666012763977,
"learning_rate": 1.3164131265197555e-09,
"loss": 0.253,
"step": 99500
},
{
"epoch": 1.0,
"grad_norm": 2.2342171669006348,
"learning_rate": 5.2447679532718895e-15,
"loss": 0.2454,
"step": 100000
},
{
"epoch": 1.0,
"step": 100000,
"total_flos": 1.1225596249876746e+18,
"train_loss": 0.27730819496154785,
"train_runtime": 24748.5341,
"train_samples_per_second": 4.041,
"train_steps_per_second": 4.041
}
],
"logging_steps": 500,
"max_steps": 100000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 5000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.1225596249876746e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}