{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 100000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.005,
"grad_norm": 0.9899646043777466,
"learning_rate": 3.326666666666667e-06,
"loss": 0.7756,
"step": 500
},
{
"epoch": 0.01,
"grad_norm": 1.3953757286071777,
"learning_rate": 6.660000000000001e-06,
"loss": 0.5041,
"step": 1000
},
{
"epoch": 0.015,
"grad_norm": 2.672257900238037,
"learning_rate": 9.993333333333333e-06,
"loss": 0.4406,
"step": 1500
},
{
"epoch": 0.02,
"grad_norm": 1.0422719717025757,
"learning_rate": 1.3326666666666667e-05,
"loss": 0.3907,
"step": 2000
},
{
"epoch": 0.025,
"grad_norm": 2.749046564102173,
"learning_rate": 1.666e-05,
"loss": 0.3794,
"step": 2500
},
{
"epoch": 0.03,
"grad_norm": 1.8957346677780151,
"learning_rate": 1.9993333333333337e-05,
"loss": 0.3677,
"step": 3000
},
{
"epoch": 0.035,
"grad_norm": 2.9847750663757324,
"learning_rate": 1.9998694075951004e-05,
"loss": 0.3705,
"step": 3500
},
{
"epoch": 0.04,
"grad_norm": 3.22480845451355,
"learning_rate": 1.9994766172915367e-05,
"loss": 0.3402,
"step": 4000
},
{
"epoch": 0.045,
"grad_norm": 1.5718226432800293,
"learning_rate": 1.9988217315672814e-05,
"loss": 0.3419,
"step": 4500
},
{
"epoch": 0.05,
"grad_norm": 2.2217137813568115,
"learning_rate": 1.997904922154766e-05,
"loss": 0.325,
"step": 5000
},
{
"epoch": 0.055,
"grad_norm": 3.4321208000183105,
"learning_rate": 1.99672642947137e-05,
"loss": 0.3295,
"step": 5500
},
{
"epoch": 0.06,
"grad_norm": 3.138777017593384,
"learning_rate": 1.995286562556374e-05,
"loss": 0.3298,
"step": 6000
},
{
"epoch": 0.065,
"grad_norm": 4.528077602386475,
"learning_rate": 1.993585698989922e-05,
"loss": 0.3324,
"step": 6500
},
{
"epoch": 0.07,
"grad_norm": 1.9870072603225708,
"learning_rate": 1.9916242847940064e-05,
"loss": 0.3279,
"step": 7000
},
{
"epoch": 0.075,
"grad_norm": 2.230520248413086,
"learning_rate": 1.9894028343155063e-05,
"loss": 0.3309,
"step": 7500
},
{
"epoch": 0.08,
"grad_norm": 1.8087975978851318,
"learning_rate": 1.9869219300913088e-05,
"loss": 0.3281,
"step": 8000
},
{
"epoch": 0.085,
"grad_norm": 2.8722152709960938,
"learning_rate": 1.9841822226955496e-05,
"loss": 0.312,
"step": 8500
},
{
"epoch": 0.09,
"grad_norm": 1.97301185131073,
"learning_rate": 1.9811844305690107e-05,
"loss": 0.314,
"step": 9000
},
{
"epoch": 0.095,
"grad_norm": 2.5529353618621826,
"learning_rate": 1.977929339830722e-05,
"loss": 0.3149,
"step": 9500
},
{
"epoch": 0.1,
"grad_norm": 2.081092596054077,
"learning_rate": 1.9744178040718144e-05,
"loss": 0.3075,
"step": 10000
},
{
"epoch": 0.105,
"grad_norm": 3.256613254547119,
"learning_rate": 1.9706507441316814e-05,
"loss": 0.3174,
"step": 10500
},
{
"epoch": 0.11,
"grad_norm": 1.9820867776870728,
"learning_rate": 1.9666291478565047e-05,
"loss": 0.3239,
"step": 11000
},
{
"epoch": 0.115,
"grad_norm": 2.5789074897766113,
"learning_rate": 1.962354069840208e-05,
"loss": 0.3118,
"step": 11500
},
{
"epoch": 0.12,
"grad_norm": 3.117645740509033,
"learning_rate": 1.9578266311479082e-05,
"loss": 0.3139,
"step": 12000
},
{
"epoch": 0.125,
"grad_norm": 2.703904151916504,
"learning_rate": 1.9530480190219365e-05,
"loss": 0.3191,
"step": 12500
},
{
"epoch": 0.13,
"grad_norm": 3.2976551055908203,
"learning_rate": 1.948019486570503e-05,
"loss": 0.3041,
"step": 13000
},
{
"epoch": 0.135,
"grad_norm": 3.4138541221618652,
"learning_rate": 1.942742352439093e-05,
"loss": 0.3012,
"step": 13500
},
{
"epoch": 0.14,
"grad_norm": 3.0907506942749023,
"learning_rate": 1.937218000464673e-05,
"loss": 0.3099,
"step": 14000
},
{
"epoch": 0.145,
"grad_norm": 3.5500905513763428,
"learning_rate": 1.931447879312804e-05,
"loss": 0.3103,
"step": 14500
},
{
"epoch": 0.15,
"grad_norm": 2.548569917678833,
"learning_rate": 1.9254335020977542e-05,
"loss": 0.3076,
"step": 15000
},
{
"epoch": 0.155,
"grad_norm": 3.012566566467285,
"learning_rate": 1.9191764459857124e-05,
"loss": 0.3022,
"step": 15500
},
{
"epoch": 0.16,
"grad_norm": 2.3697686195373535,
"learning_rate": 1.9126783517812005e-05,
"loss": 0.2877,
"step": 16000
},
{
"epoch": 0.165,
"grad_norm": 2.7005627155303955,
"learning_rate": 1.9059409234968034e-05,
"loss": 0.2952,
"step": 16500
},
{
"epoch": 0.17,
"grad_norm": 1.2764240503311157,
"learning_rate": 1.8989659279063214e-05,
"loss": 0.2881,
"step": 17000
},
{
"epoch": 0.175,
"grad_norm": 4.40521240234375,
"learning_rate": 1.8917551940814615e-05,
"loss": 0.2942,
"step": 17500
},
{
"epoch": 0.18,
"grad_norm": 2.161640167236328,
"learning_rate": 1.8843106129121993e-05,
"loss": 0.2972,
"step": 18000
},
{
"epoch": 0.185,
"grad_norm": 23.8270320892334,
"learning_rate": 1.8766341366109214e-05,
"loss": 0.2898,
"step": 18500
},
{
"epoch": 0.19,
"grad_norm": 2.1488304138183594,
"learning_rate": 1.8687277782004954e-05,
"loss": 0.2778,
"step": 19000
},
{
"epoch": 0.195,
"grad_norm": 2.925344705581665,
"learning_rate": 1.8605936109863872e-05,
"loss": 0.2982,
"step": 19500
},
{
"epoch": 0.2,
"grad_norm": 2.227436065673828,
"learning_rate": 1.8522337680129728e-05,
"loss": 0.2899,
"step": 20000
},
{
"epoch": 0.205,
"grad_norm": 2.2893733978271484,
"learning_rate": 1.843650441504186e-05,
"loss": 0.2927,
"step": 20500
},
{
"epoch": 0.21,
"grad_norm": 1.1803125143051147,
"learning_rate": 1.8348458822886437e-05,
"loss": 0.2885,
"step": 21000
},
{
"epoch": 0.215,
"grad_norm": 2.5023751258850098,
"learning_rate": 1.8258223992094055e-05,
"loss": 0.2983,
"step": 21500
},
{
"epoch": 0.22,
"grad_norm": 1.0207619667053223,
"learning_rate": 1.816582358518521e-05,
"loss": 0.2885,
"step": 22000
},
{
"epoch": 0.225,
"grad_norm": 2.7328686714172363,
"learning_rate": 1.80712818325652e-05,
"loss": 0.2902,
"step": 22500
},
{
"epoch": 0.23,
"grad_norm": 2.740802049636841,
"learning_rate": 1.7974623526170113e-05,
"loss": 0.2953,
"step": 23000
},
{
"epoch": 0.235,
"grad_norm": 1.5233033895492554,
"learning_rate": 1.78758740129656e-05,
"loss": 0.2845,
"step": 23500
},
{
"epoch": 0.24,
"grad_norm": 4.8131866455078125,
"learning_rate": 1.777505918830004e-05,
"loss": 0.2851,
"step": 24000
},
{
"epoch": 0.245,
"grad_norm": 2.3986711502075195,
"learning_rate": 1.767220548911395e-05,
"loss": 0.2697,
"step": 24500
},
{
"epoch": 0.25,
"grad_norm": 2.7533788681030273,
"learning_rate": 1.756733988700737e-05,
"loss": 0.274,
"step": 25000
},
{
"epoch": 0.255,
"grad_norm": 2.176361560821533,
"learning_rate": 1.7460489881166986e-05,
"loss": 0.2848,
"step": 25500
},
{
"epoch": 0.26,
"grad_norm": 2.001997470855713,
"learning_rate": 1.7351683491154984e-05,
"loss": 0.2959,
"step": 26000
},
{
"epoch": 0.265,
"grad_norm": 2.8803274631500244,
"learning_rate": 1.7240949249561402e-05,
"loss": 0.2816,
"step": 26500
},
{
"epoch": 0.27,
"grad_norm": 2.2319493293762207,
"learning_rate": 1.7128316194521923e-05,
"loss": 0.2692,
"step": 27000
},
{
"epoch": 0.275,
"grad_norm": 3.515261173248291,
"learning_rate": 1.7013813862103163e-05,
"loss": 0.2744,
"step": 27500
},
{
"epoch": 0.28,
"grad_norm": 1.112574815750122,
"learning_rate": 1.689747227855733e-05,
"loss": 0.279,
"step": 28000
},
{
"epoch": 0.285,
"grad_norm": 2.715858221054077,
"learning_rate": 1.677932195244836e-05,
"loss": 0.2795,
"step": 28500
},
{
"epoch": 0.29,
"grad_norm": 3.356738805770874,
"learning_rate": 1.6659393866651585e-05,
"loss": 0.2698,
"step": 29000
},
{
"epoch": 0.295,
"grad_norm": 3.5027854442596436,
"learning_rate": 1.6537719470228993e-05,
"loss": 0.2751,
"step": 29500
},
{
"epoch": 0.3,
"grad_norm": 2.723360776901245,
"learning_rate": 1.6414330670182278e-05,
"loss": 0.2761,
"step": 30000
},
{
"epoch": 0.305,
"grad_norm": 2.674299955368042,
"learning_rate": 1.6289259823085765e-05,
"loss": 0.2834,
"step": 30500
},
{
"epoch": 0.31,
"grad_norm": 1.7354482412338257,
"learning_rate": 1.6162539726601477e-05,
"loss": 0.2802,
"step": 31000
},
{
"epoch": 0.315,
"grad_norm": 3.5115904808044434,
"learning_rate": 1.603420361087849e-05,
"loss": 0.2791,
"step": 31500
},
{
"epoch": 0.32,
"grad_norm": 1.5374090671539307,
"learning_rate": 1.5904285129838924e-05,
"loss": 0.2803,
"step": 32000
},
{
"epoch": 0.325,
"grad_norm": 2.6573033332824707,
"learning_rate": 1.577281835235279e-05,
"loss": 0.2719,
"step": 32500
},
{
"epoch": 0.33,
"grad_norm": 2.28947377204895,
"learning_rate": 1.563983775330399e-05,
"loss": 0.2699,
"step": 33000
},
{
"epoch": 0.335,
"grad_norm": 2.1672635078430176,
"learning_rate": 1.5505378204549907e-05,
"loss": 0.2779,
"step": 33500
},
{
"epoch": 0.34,
"grad_norm": 5.222517490386963,
"learning_rate": 1.536947496577684e-05,
"loss": 0.2792,
"step": 34000
},
{
"epoch": 0.345,
"grad_norm": 2.769939661026001,
"learning_rate": 1.5232163675253793e-05,
"loss": 0.2829,
"step": 34500
},
{
"epoch": 0.35,
"grad_norm": 1.86508309841156,
"learning_rate": 1.5093480340486922e-05,
"loss": 0.2696,
"step": 35000
},
{
"epoch": 0.355,
"grad_norm": 2.6956353187561035,
"learning_rate": 1.495346132877721e-05,
"loss": 0.2694,
"step": 35500
},
{
"epoch": 0.36,
"grad_norm": 3.8267648220062256,
"learning_rate": 1.4812143357683773e-05,
"loss": 0.2557,
"step": 36000
},
{
"epoch": 0.365,
"grad_norm": 2.288548231124878,
"learning_rate": 1.4669563485395315e-05,
"loss": 0.2736,
"step": 36500
},
{
"epoch": 0.37,
"grad_norm": 2.274164915084839,
"learning_rate": 1.4525759101012249e-05,
"loss": 0.2778,
"step": 37000
},
{
"epoch": 0.375,
"grad_norm": 2.6275923252105713,
"learning_rate": 1.4380767914742074e-05,
"loss": 0.2706,
"step": 37500
},
{
"epoch": 0.38,
"grad_norm": 1.9517382383346558,
"learning_rate": 1.4234627948010497e-05,
"loss": 0.2604,
"step": 38000
},
{
"epoch": 0.385,
"grad_norm": 2.431234121322632,
"learning_rate": 1.4087377523491016e-05,
"loss": 0.2752,
"step": 38500
},
{
"epoch": 0.39,
"grad_norm": 2.1692144870758057,
"learning_rate": 1.3939055255055419e-05,
"loss": 0.2602,
"step": 39000
},
{
"epoch": 0.395,
"grad_norm": 3.2636561393737793,
"learning_rate": 1.3789700037647988e-05,
"loss": 0.2676,
"step": 39500
},
{
"epoch": 0.4,
"grad_norm": 4.37136173248291,
"learning_rate": 1.363935103708598e-05,
"loss": 0.2586,
"step": 40000
},
{
"epoch": 0.405,
"grad_norm": 4.321747779846191,
"learning_rate": 1.3488047679789052e-05,
"loss": 0.2738,
"step": 40500
},
{
"epoch": 0.41,
"grad_norm": 2.1585159301757812,
"learning_rate": 1.3335829642440372e-05,
"loss": 0.2633,
"step": 41000
},
{
"epoch": 0.415,
"grad_norm": 2.787271738052368,
"learning_rate": 1.3182736841582109e-05,
"loss": 0.2722,
"step": 41500
},
{
"epoch": 0.42,
"grad_norm": 2.1286282539367676,
"learning_rate": 1.3028809423147997e-05,
"loss": 0.2794,
"step": 42000
},
{
"epoch": 0.425,
"grad_norm": 2.528885841369629,
"learning_rate": 1.2874087751935756e-05,
"loss": 0.2704,
"step": 42500
},
{
"epoch": 0.43,
"grad_norm": 2.1489899158477783,
"learning_rate": 1.2718612401022148e-05,
"loss": 0.2683,
"step": 43000
},
{
"epoch": 0.435,
"grad_norm": 2.373365640640259,
"learning_rate": 1.2562424141123364e-05,
"loss": 0.2703,
"step": 43500
},
{
"epoch": 0.44,
"grad_norm": 2.9887542724609375,
"learning_rate": 1.2405563929903614e-05,
"loss": 0.2692,
"step": 44000
},
{
"epoch": 0.445,
"grad_norm": 3.2291131019592285,
"learning_rate": 1.2248072901234715e-05,
"loss": 0.2639,
"step": 44500
},
{
"epoch": 0.45,
"grad_norm": 2.0922982692718506,
"learning_rate": 1.2089992354409415e-05,
"loss": 0.2679,
"step": 45000
},
{
"epoch": 0.455,
"grad_norm": 2.3987605571746826,
"learning_rate": 1.1931363743311377e-05,
"loss": 0.2644,
"step": 45500
},
{
"epoch": 0.46,
"grad_norm": 2.9259192943573,
"learning_rate": 1.1772228665544634e-05,
"loss": 0.2627,
"step": 46000
},
{
"epoch": 0.465,
"grad_norm": 1.9953309297561646,
"learning_rate": 1.1612628851525313e-05,
"loss": 0.2694,
"step": 46500
},
{
"epoch": 0.47,
"grad_norm": 2.523066759109497,
"learning_rate": 1.1452606153538577e-05,
"loss": 0.2634,
"step": 47000
},
{
"epoch": 0.475,
"grad_norm": 1.3646570444107056,
"learning_rate": 1.129220253476358e-05,
"loss": 0.2648,
"step": 47500
},
{
"epoch": 0.48,
"grad_norm": 2.9045891761779785,
"learning_rate": 1.1131460058269342e-05,
"loss": 0.2545,
"step": 48000
},
{
"epoch": 0.485,
"grad_norm": 2.251849889755249,
"learning_rate": 1.0970420875984453e-05,
"loss": 0.2634,
"step": 48500
},
{
"epoch": 0.49,
"grad_norm": 3.584717035293579,
"learning_rate": 1.0809127217643462e-05,
"loss": 0.2632,
"step": 49000
},
{
"epoch": 0.495,
"grad_norm": 2.139634132385254,
"learning_rate": 1.0647621379712843e-05,
"loss": 0.2655,
"step": 49500
},
{
"epoch": 0.5,
"grad_norm": 1.8866384029388428,
"learning_rate": 1.0485945714299487e-05,
"loss": 0.2543,
"step": 50000
},
{
"epoch": 0.505,
"grad_norm": 2.74698543548584,
"learning_rate": 1.0324142618044612e-05,
"loss": 0.2686,
"step": 50500
},
{
"epoch": 0.51,
"grad_norm": 1.884582281112671,
"learning_rate": 1.0162254521005956e-05,
"loss": 0.2631,
"step": 51000
},
{
"epoch": 0.515,
"grad_norm": 2.098027467727661,
"learning_rate": 1.0000323875531241e-05,
"loss": 0.2598,
"step": 51500
},
{
"epoch": 0.52,
"grad_norm": 2.741763114929199,
"learning_rate": 9.838393145125787e-06,
"loss": 0.2611,
"step": 52000
},
{
"epoch": 0.525,
"grad_norm": 3.3235342502593994,
"learning_rate": 9.676504793317163e-06,
"loss": 0.2585,
"step": 52500
},
{
"epoch": 0.53,
"grad_norm": 2.0972297191619873,
"learning_rate": 9.514701272519918e-06,
"loss": 0.2542,
"step": 53000
},
{
"epoch": 0.535,
"grad_norm": 1.5382198095321655,
"learning_rate": 9.353025012903112e-06,
"loss": 0.2642,
"step": 53500
},
{
"epoch": 0.54,
"grad_norm": 1.974549412727356,
"learning_rate": 9.191518411263777e-06,
"loss": 0.2419,
"step": 54000
},
{
"epoch": 0.545,
"grad_norm": 2.693479061126709,
"learning_rate": 9.03022381990908e-06,
"loss": 0.2691,
"step": 54500
},
{
"epoch": 0.55,
"grad_norm": 5.070830821990967,
"learning_rate": 8.869183535550167e-06,
"loss": 0.2734,
"step": 55000
},
{
"epoch": 0.555,
"grad_norm": 3.0315475463867188,
"learning_rate": 8.708439788210573e-06,
"loss": 0.2692,
"step": 55500
},
{
"epoch": 0.56,
"grad_norm": 3.1777706146240234,
"learning_rate": 8.548034730152153e-06,
"loss": 0.2625,
"step": 56000
},
{
"epoch": 0.565,
"grad_norm": 2.1810688972473145,
"learning_rate": 8.388010424821354e-06,
"loss": 0.2651,
"step": 56500
},
{
"epoch": 0.57,
"grad_norm": 2.2163448333740234,
"learning_rate": 8.22840883581883e-06,
"loss": 0.2526,
"step": 57000
},
{
"epoch": 0.575,
"grad_norm": 2.2531285285949707,
"learning_rate": 8.069271815895196e-06,
"loss": 0.2809,
"step": 57500
},
{
"epoch": 0.58,
"grad_norm": 4.996068954467773,
"learning_rate": 7.910641095975886e-06,
"loss": 0.2585,
"step": 58000
},
{
"epoch": 0.585,
"grad_norm": 1.0134843587875366,
"learning_rate": 7.752558274217919e-06,
"loss": 0.2537,
"step": 58500
},
{
"epoch": 0.59,
"grad_norm": 3.20263671875,
"learning_rate": 7.595064805101538e-06,
"loss": 0.2506,
"step": 59000
},
{
"epoch": 0.595,
"grad_norm": 2.070533275604248,
"learning_rate": 7.438201988559449e-06,
"loss": 0.2648,
"step": 59500
},
{
"epoch": 0.6,
"grad_norm": 5.259614944458008,
"learning_rate": 7.282010959146662e-06,
"loss": 0.2647,
"step": 60000
},
{
"epoch": 0.605,
"grad_norm": 4.084048748016357,
"learning_rate": 7.1265326752536545e-06,
"loss": 0.2632,
"step": 60500
},
{
"epoch": 0.61,
"grad_norm": 2.7259840965270996,
"learning_rate": 6.971807908365737e-06,
"loss": 0.2584,
"step": 61000
},
{
"epoch": 0.615,
"grad_norm": 2.913233995437622,
"learning_rate": 6.817877232371453e-06,
"loss": 0.2655,
"step": 61500
},
{
"epoch": 0.62,
"grad_norm": 2.728580951690674,
"learning_rate": 6.66478101292277e-06,
"loss": 0.2502,
"step": 62000
},
{
"epoch": 0.625,
"grad_norm": 3.386467933654785,
"learning_rate": 6.5125593968498855e-06,
"loss": 0.2458,
"step": 62500
},
{
"epoch": 0.63,
"grad_norm": 3.570934534072876,
"learning_rate": 6.361252301633422e-06,
"loss": 0.2637,
"step": 63000
},
{
"epoch": 0.635,
"grad_norm": 2.603757619857788,
"learning_rate": 6.210899404936761e-06,
"loss": 0.2535,
"step": 63500
},
{
"epoch": 0.64,
"grad_norm": 3.9069035053253174,
"learning_rate": 6.061540134201248e-06,
"loss": 0.2564,
"step": 64000
},
{
"epoch": 0.645,
"grad_norm": 4.281370162963867,
"learning_rate": 5.913213656307049e-06,
"loss": 0.2602,
"step": 64500
},
{
"epoch": 0.65,
"grad_norm": 2.389120101928711,
"learning_rate": 5.765958867302311e-06,
"loss": 0.252,
"step": 65000
},
{
"epoch": 0.655,
"grad_norm": 2.0732436180114746,
"learning_rate": 5.619814382203341e-06,
"loss": 0.2439,
"step": 65500
},
{
"epoch": 0.66,
"grad_norm": 3.80635142326355,
"learning_rate": 5.474818524868482e-06,
"loss": 0.274,
"step": 66000
},
{
"epoch": 0.665,
"grad_norm": 2.9537928104400635,
"learning_rate": 5.331009317948359e-06,
"loss": 0.2676,
"step": 66500
},
{
"epoch": 0.67,
"grad_norm": 2.7541537284851074,
"learning_rate": 5.188424472915091e-06,
"loss": 0.2708,
"step": 67000
},
{
"epoch": 0.675,
"grad_norm": 2.415677070617676,
"learning_rate": 5.047101380173111e-06,
"loss": 0.2449,
"step": 67500
},
{
"epoch": 0.68,
"grad_norm": 3.56915020942688,
"learning_rate": 4.907077099254194e-06,
"loss": 0.2608,
"step": 68000
},
{
"epoch": 0.685,
"grad_norm": 2.8096132278442383,
"learning_rate": 4.7683883490991985e-06,
"loss": 0.263,
"step": 68500
},
{
"epoch": 0.69,
"grad_norm": 1.026166558265686,
"learning_rate": 4.6310714984292096e-06,
"loss": 0.2621,
"step": 69000
},
{
"epoch": 0.695,
"grad_norm": 0.8848626613616943,
"learning_rate": 4.495162556208438e-06,
"loss": 0.2505,
"step": 69500
},
{
"epoch": 0.7,
"grad_norm": 6.4718217849731445,
"learning_rate": 4.360697162201508e-06,
"loss": 0.2543,
"step": 70000
},
{
"epoch": 0.705,
"grad_norm": 3.0504908561706543,
"learning_rate": 4.2277105776275395e-06,
"loss": 0.2496,
"step": 70500
},
{
"epoch": 0.71,
"grad_norm": 2.385812997817993,
"learning_rate": 4.096237675913521e-06,
"loss": 0.2618,
"step": 71000
},
{
"epoch": 0.715,
"grad_norm": 2.370842695236206,
"learning_rate": 3.966312933549325e-06,
"loss": 0.2407,
"step": 71500
},
{
"epoch": 0.72,
"grad_norm": 4.309947967529297,
"learning_rate": 3.837970421046881e-06,
"loss": 0.25,
"step": 72000
},
{
"epoch": 0.725,
"grad_norm": 3.7516446113586426,
"learning_rate": 3.7112437940057543e-06,
"loss": 0.2499,
"step": 72500
},
{
"epoch": 0.73,
"grad_norm": 2.502753973007202,
"learning_rate": 3.5861662842875534e-06,
"loss": 0.2424,
"step": 73000
},
{
"epoch": 0.735,
"grad_norm": 4.212218761444092,
"learning_rate": 3.4627706913014503e-06,
"loss": 0.2549,
"step": 73500
},
{
"epoch": 0.74,
"grad_norm": 2.6515533924102783,
"learning_rate": 3.3410893734031046e-06,
"loss": 0.2633,
"step": 74000
},
{
"epoch": 0.745,
"grad_norm": 2.6154916286468506,
"learning_rate": 3.2211542394092443e-06,
"loss": 0.2523,
"step": 74500
},
{
"epoch": 0.75,
"grad_norm": 2.3544111251831055,
"learning_rate": 3.102996740230131e-06,
"loss": 0.2445,
"step": 75000
},
{
"epoch": 0.755,
"grad_norm": 1.8452802896499634,
"learning_rate": 2.986647860622105e-06,
"loss": 0.2612,
"step": 75500
},
{
"epoch": 0.76,
"grad_norm": 1.7021652460098267,
"learning_rate": 2.8721381110623724e-06,
"loss": 0.2494,
"step": 76000
},
{
"epoch": 0.765,
"grad_norm": 2.5925469398498535,
"learning_rate": 2.7594975197481546e-06,
"loss": 0.25,
"step": 76500
},
{
"epoch": 0.77,
"grad_norm": 3.866541862487793,
"learning_rate": 2.64875562472232e-06,
"loss": 0.2483,
"step": 77000
},
{
"epoch": 0.775,
"grad_norm": 4.389611721038818,
"learning_rate": 2.5399414661275444e-06,
"loss": 0.2588,
"step": 77500
},
{
"epoch": 0.78,
"grad_norm": 1.7422980070114136,
"learning_rate": 2.433083578591029e-06,
"loss": 0.248,
"step": 78000
},
{
"epoch": 0.785,
"grad_norm": 3.3398666381835938,
"learning_rate": 2.3282099837418004e-06,
"loss": 0.2582,
"step": 78500
},
{
"epoch": 0.79,
"grad_norm": 3.718583822250366,
"learning_rate": 2.2253481828625144e-06,
"loss": 0.2661,
"step": 79000
},
{
"epoch": 0.795,
"grad_norm": 3.974256753921509,
"learning_rate": 2.1245251496777274e-06,
"loss": 0.2553,
"step": 79500
},
{
"epoch": 0.8,
"grad_norm": 2.317969799041748,
"learning_rate": 2.025767323280502e-06,
"loss": 0.2556,
"step": 80000
},
{
"epoch": 0.805,
"grad_norm": 5.751351356506348,
"learning_rate": 1.929100601199211e-06,
"loss": 0.2388,
"step": 80500
},
{
"epoch": 0.81,
"grad_norm": 3.3387367725372314,
"learning_rate": 1.834550332606365e-06,
"loss": 0.2353,
"step": 81000
},
{
"epoch": 0.815,
"grad_norm": 2.7964117527008057,
"learning_rate": 1.742141311671227e-06,
"loss": 0.2499,
"step": 81500
},
{
"epoch": 0.82,
"grad_norm": 2.3127243518829346,
"learning_rate": 1.6518977710579708e-06,
"loss": 0.2693,
"step": 82000
},
{
"epoch": 0.825,
"grad_norm": 2.1335015296936035,
"learning_rate": 1.5638433755710892e-06,
"loss": 0.2516,
"step": 82500
},
{
"epoch": 0.83,
"grad_norm": 2.2353951930999756,
"learning_rate": 1.4780012159497037e-06,
"loss": 0.2643,
"step": 83000
},
{
"epoch": 0.835,
"grad_norm": 2.0724055767059326,
"learning_rate": 1.3943938028124216e-06,
"loss": 0.2568,
"step": 83500
},
{
"epoch": 0.84,
"grad_norm": 6.218677520751953,
"learning_rate": 1.3130430607543166e-06,
"loss": 0.2567,
"step": 84000
},
{
"epoch": 0.845,
"grad_norm": 5.340097904205322,
"learning_rate": 1.2339703225975818e-06,
"loss": 0.2517,
"step": 84500
},
{
"epoch": 0.85,
"grad_norm": 2.1726861000061035,
"learning_rate": 1.1571963237973693e-06,
"loss": 0.2463,
"step": 85000
},
{
"epoch": 0.855,
"grad_norm": 3.2792880535125732,
"learning_rate": 1.0827411970042723e-06,
"loss": 0.2534,
"step": 85500
},
{
"epoch": 0.86,
"grad_norm": 3.8043406009674072,
"learning_rate": 1.010624466784892e-06,
"loss": 0.249,
"step": 86000
},
{
"epoch": 0.865,
"grad_norm": 4.719531059265137,
"learning_rate": 9.40865044501853e-07,
"loss": 0.2503,
"step": 86500
},
{
"epoch": 0.87,
"grad_norm": 2.73796010017395,
"learning_rate": 8.734812233546319e-07,
"loss": 0.2535,
"step": 87000
},
{
"epoch": 0.875,
"grad_norm": 2.953639030456543,
"learning_rate": 8.084906735824771e-07,
"loss": 0.2544,
"step": 87500
},
{
"epoch": 0.88,
"grad_norm": 5.714886665344238,
"learning_rate": 7.45910437830697e-07,
"loss": 0.2505,
"step": 88000
},
{
"epoch": 0.885,
"grad_norm": 3.277892827987671,
"learning_rate": 6.857569266815233e-07,
"loss": 0.2474,
"step": 88500
},
{
"epoch": 0.89,
"grad_norm": 3.6936960220336914,
"learning_rate": 6.2804591435072e-07,
"loss": 0.2611,
"step": 89000
},
{
"epoch": 0.895,
"grad_norm": 1.2861735820770264,
"learning_rate": 5.727925345510644e-07,
"loss": 0.2432,
"step": 89500
},
{
"epoch": 0.9,
"grad_norm": 1.9155391454696655,
"learning_rate": 5.200112765237985e-07,
"loss": 0.2508,
"step": 90000
},
{
"epoch": 0.905,
"grad_norm": 1.7853316068649292,
"learning_rate": 4.697159812390739e-07,
"loss": 0.2452,
"step": 90500
},
{
"epoch": 0.91,
"grad_norm": 4.1483235359191895,
"learning_rate": 4.21919837766398e-07,
"loss": 0.2483,
"step": 91000
},
{
"epoch": 0.915,
"grad_norm": 2.7010982036590576,
"learning_rate": 3.766353798160283e-07,
"loss": 0.2435,
"step": 91500
},
{
"epoch": 0.92,
"grad_norm": 2.720015048980713,
"learning_rate": 3.338744824522266e-07,
"loss": 0.2488,
"step": 92000
},
{
"epoch": 0.925,
"grad_norm": 2.154423952102661,
"learning_rate": 2.9364835897922606e-07,
"loss": 0.2523,
"step": 92500
},
{
"epoch": 0.93,
"grad_norm": 5.67596960067749,
"learning_rate": 2.5596755800073593e-07,
"loss": 0.2483,
"step": 93000
},
{
"epoch": 0.935,
"grad_norm": 2.8991081714630127,
"learning_rate": 2.208419606537593e-07,
"loss": 0.2536,
"step": 93500
},
{
"epoch": 0.94,
"grad_norm": 2.9144272804260254,
"learning_rate": 1.8828077801743006e-07,
"loss": 0.2557,
"step": 94000
},
{
"epoch": 0.945,
"grad_norm": 3.0168917179107666,
"learning_rate": 1.5829254869756395e-07,
"loss": 0.2567,
"step": 94500
},
{
"epoch": 0.95,
"grad_norm": 0.992645800113678,
"learning_rate": 1.3088513658756962e-07,
"loss": 0.2498,
"step": 95000
},
{
"epoch": 0.955,
"grad_norm": 2.234450101852417,
"learning_rate": 1.060657288062672e-07,
"loss": 0.2503,
"step": 95500
},
{
"epoch": 0.96,
"grad_norm": 2.546241521835327,
"learning_rate": 8.384083381319464e-08,
"loss": 0.2446,
"step": 96000
},
{
"epoch": 0.965,
"grad_norm": 4.158427715301514,
"learning_rate": 6.421627970187971e-08,
"loss": 0.2467,
"step": 96500
},
{
"epoch": 0.97,
"grad_norm": 2.6824352741241455,
"learning_rate": 4.719721267152033e-08,
"loss": 0.2586,
"step": 97000
},
{
"epoch": 0.975,
"grad_norm": 4.74870491027832,
"learning_rate": 3.2788095677480644e-08,
"loss": 0.252,
"step": 97500
},
{
"epoch": 0.98,
"grad_norm": 2.3958590030670166,
"learning_rate": 2.099270726096503e-08,
"loss": 0.2527,
"step": 98000
},
{
"epoch": 0.985,
"grad_norm": 4.80270528793335,
"learning_rate": 1.1814140558158438e-08,
"loss": 0.2459,
"step": 98500
},
{
"epoch": 0.99,
"grad_norm": 3.4724161624908447,
"learning_rate": 5.254802489100819e-09,
"loss": 0.254,
"step": 99000
},
{
"epoch": 0.995,
"grad_norm": 1.695827603340149,
"learning_rate": 1.3164131265197555e-09,
"loss": 0.2544,
"step": 99500
},
{
"epoch": 1.0,
"grad_norm": 2.3921620845794678,
"learning_rate": 5.2447679532718895e-15,
"loss": 0.2468,
"step": 100000
}
],
"logging_steps": 500,
"max_steps": 100000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 5000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.1222027981664584e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}