{
"best_metric": 0.750106155872345,
"best_model_checkpoint": "sean_test_out_large/checkpoint-108334",
"epoch": 1.0,
"eval_steps": 500,
"global_step": 108334,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 1387778.25,
"learning_rate": 5.972307862720845e-05,
"loss": 1.1897,
"step": 500
},
{
"epoch": 0.01,
"grad_norm": 664302.625,
"learning_rate": 5.94461572544169e-05,
"loss": 1.0759,
"step": 1000
},
{
"epoch": 0.01,
"grad_norm": 428486.84375,
"learning_rate": 5.9169235881625344e-05,
"loss": 1.0728,
"step": 1500
},
{
"epoch": 0.02,
"grad_norm": 299683.625,
"learning_rate": 5.8892314508833796e-05,
"loss": 1.0409,
"step": 2000
},
{
"epoch": 0.02,
"grad_norm": 503517.40625,
"learning_rate": 5.861539313604224e-05,
"loss": 1.0335,
"step": 2500
},
{
"epoch": 0.03,
"grad_norm": 657503.8125,
"learning_rate": 5.833847176325069e-05,
"loss": 0.9533,
"step": 3000
},
{
"epoch": 0.03,
"grad_norm": 533787.3125,
"learning_rate": 5.806155039045914e-05,
"loss": 0.9992,
"step": 3500
},
{
"epoch": 0.04,
"grad_norm": 913784.6875,
"learning_rate": 5.7784629017667584e-05,
"loss": 0.9657,
"step": 4000
},
{
"epoch": 0.04,
"grad_norm": 310152.21875,
"learning_rate": 5.7507707644876036e-05,
"loss": 0.9652,
"step": 4500
},
{
"epoch": 0.05,
"grad_norm": 309690.34375,
"learning_rate": 5.723078627208448e-05,
"loss": 0.969,
"step": 5000
},
{
"epoch": 0.05,
"grad_norm": 211456.5,
"learning_rate": 5.695386489929293e-05,
"loss": 0.9515,
"step": 5500
},
{
"epoch": 0.06,
"grad_norm": 391416.65625,
"learning_rate": 5.667694352650138e-05,
"loss": 0.9221,
"step": 6000
},
{
"epoch": 0.06,
"grad_norm": 221054.703125,
"learning_rate": 5.6400022153709824e-05,
"loss": 0.9679,
"step": 6500
},
{
"epoch": 0.06,
"grad_norm": 472076.15625,
"learning_rate": 5.612310078091827e-05,
"loss": 0.9519,
"step": 7000
},
{
"epoch": 0.07,
"grad_norm": 988913.375,
"learning_rate": 5.584617940812672e-05,
"loss": 0.9447,
"step": 7500
},
{
"epoch": 0.07,
"grad_norm": 661127.75,
"learning_rate": 5.556925803533517e-05,
"loss": 0.9009,
"step": 8000
},
{
"epoch": 0.08,
"grad_norm": 212504.640625,
"learning_rate": 5.529233666254362e-05,
"loss": 0.9456,
"step": 8500
},
{
"epoch": 0.08,
"grad_norm": 473405.96875,
"learning_rate": 5.5015415289752064e-05,
"loss": 0.9385,
"step": 9000
},
{
"epoch": 0.09,
"grad_norm": 266084.78125,
"learning_rate": 5.473849391696051e-05,
"loss": 1.0131,
"step": 9500
},
{
"epoch": 0.09,
"grad_norm": 491548.53125,
"learning_rate": 5.446157254416896e-05,
"loss": 0.9254,
"step": 10000
},
{
"epoch": 0.1,
"grad_norm": 267938.4375,
"learning_rate": 5.418465117137741e-05,
"loss": 0.9101,
"step": 10500
},
{
"epoch": 0.1,
"grad_norm": 258540.84375,
"learning_rate": 5.390772979858586e-05,
"loss": 0.9234,
"step": 11000
},
{
"epoch": 0.11,
"grad_norm": 200250.609375,
"learning_rate": 5.3630808425794305e-05,
"loss": 0.9157,
"step": 11500
},
{
"epoch": 0.11,
"grad_norm": 480926.25,
"learning_rate": 5.335388705300275e-05,
"loss": 0.9532,
"step": 12000
},
{
"epoch": 0.12,
"grad_norm": 593979.4375,
"learning_rate": 5.30769656802112e-05,
"loss": 0.9184,
"step": 12500
},
{
"epoch": 0.12,
"grad_norm": 686144.1875,
"learning_rate": 5.280004430741965e-05,
"loss": 0.9215,
"step": 13000
},
{
"epoch": 0.12,
"grad_norm": 582782.0,
"learning_rate": 5.25231229346281e-05,
"loss": 0.9151,
"step": 13500
},
{
"epoch": 0.13,
"grad_norm": 554729.875,
"learning_rate": 5.2246201561836545e-05,
"loss": 0.9042,
"step": 14000
},
{
"epoch": 0.13,
"grad_norm": 423190.0,
"learning_rate": 5.196928018904499e-05,
"loss": 0.9114,
"step": 14500
},
{
"epoch": 0.14,
"grad_norm": 724497.5,
"learning_rate": 5.169235881625344e-05,
"loss": 0.9123,
"step": 15000
},
{
"epoch": 0.14,
"grad_norm": 333863.78125,
"learning_rate": 5.141543744346189e-05,
"loss": 0.9237,
"step": 15500
},
{
"epoch": 0.15,
"grad_norm": 282522.5,
"learning_rate": 5.113851607067033e-05,
"loss": 0.8927,
"step": 16000
},
{
"epoch": 0.15,
"grad_norm": 326572.34375,
"learning_rate": 5.0861594697878785e-05,
"loss": 0.9256,
"step": 16500
},
{
"epoch": 0.16,
"grad_norm": 454635.34375,
"learning_rate": 5.058467332508723e-05,
"loss": 0.8991,
"step": 17000
},
{
"epoch": 0.16,
"grad_norm": 306963.25,
"learning_rate": 5.030775195229568e-05,
"loss": 0.8937,
"step": 17500
},
{
"epoch": 0.17,
"grad_norm": 580224.0,
"learning_rate": 5.003083057950413e-05,
"loss": 0.9108,
"step": 18000
},
{
"epoch": 0.17,
"grad_norm": 549220.75,
"learning_rate": 4.975390920671257e-05,
"loss": 0.9307,
"step": 18500
},
{
"epoch": 0.18,
"grad_norm": 188370.0,
"learning_rate": 4.9476987833921025e-05,
"loss": 0.9009,
"step": 19000
},
{
"epoch": 0.18,
"grad_norm": 535151.8125,
"learning_rate": 4.920006646112947e-05,
"loss": 0.8773,
"step": 19500
},
{
"epoch": 0.18,
"grad_norm": 675718.1875,
"learning_rate": 4.892314508833792e-05,
"loss": 0.8984,
"step": 20000
},
{
"epoch": 0.19,
"grad_norm": 247562.0,
"learning_rate": 4.864622371554637e-05,
"loss": 0.8896,
"step": 20500
},
{
"epoch": 0.19,
"grad_norm": 412286.65625,
"learning_rate": 4.836930234275481e-05,
"loss": 0.8914,
"step": 21000
},
{
"epoch": 0.2,
"grad_norm": 362339.53125,
"learning_rate": 4.8092380969963265e-05,
"loss": 0.8906,
"step": 21500
},
{
"epoch": 0.2,
"grad_norm": 307259.8125,
"learning_rate": 4.781545959717171e-05,
"loss": 0.8615,
"step": 22000
},
{
"epoch": 0.21,
"grad_norm": 264488.625,
"learning_rate": 4.753853822438016e-05,
"loss": 0.896,
"step": 22500
},
{
"epoch": 0.21,
"grad_norm": 392361.15625,
"learning_rate": 4.726161685158861e-05,
"loss": 0.8802,
"step": 23000
},
{
"epoch": 0.22,
"grad_norm": 844823.375,
"learning_rate": 4.698469547879705e-05,
"loss": 0.8927,
"step": 23500
},
{
"epoch": 0.22,
"grad_norm": 252408.484375,
"learning_rate": 4.6707774106005505e-05,
"loss": 0.8942,
"step": 24000
},
{
"epoch": 0.23,
"grad_norm": 591983.0625,
"learning_rate": 4.643085273321395e-05,
"loss": 0.8859,
"step": 24500
},
{
"epoch": 0.23,
"grad_norm": 284245.46875,
"learning_rate": 4.6153931360422396e-05,
"loss": 0.8953,
"step": 25000
},
{
"epoch": 0.24,
"grad_norm": 288328.15625,
"learning_rate": 4.587700998763085e-05,
"loss": 0.8731,
"step": 25500
},
{
"epoch": 0.24,
"grad_norm": 422104.84375,
"learning_rate": 4.560008861483929e-05,
"loss": 0.8695,
"step": 26000
},
{
"epoch": 0.24,
"grad_norm": 450527.375,
"learning_rate": 4.5323167242047745e-05,
"loss": 0.8616,
"step": 26500
},
{
"epoch": 0.25,
"grad_norm": 440466.5,
"learning_rate": 4.504624586925619e-05,
"loss": 0.8871,
"step": 27000
},
{
"epoch": 0.25,
"grad_norm": 386183.34375,
"learning_rate": 4.4769324496464636e-05,
"loss": 0.8645,
"step": 27500
},
{
"epoch": 0.26,
"grad_norm": 542583.0,
"learning_rate": 4.449240312367309e-05,
"loss": 0.8755,
"step": 28000
},
{
"epoch": 0.26,
"grad_norm": 497213.90625,
"learning_rate": 4.421548175088153e-05,
"loss": 0.8663,
"step": 28500
},
{
"epoch": 0.27,
"grad_norm": 1084880.375,
"learning_rate": 4.3938560378089985e-05,
"loss": 0.8371,
"step": 29000
},
{
"epoch": 0.27,
"grad_norm": 365434.125,
"learning_rate": 4.366163900529843e-05,
"loss": 0.8791,
"step": 29500
},
{
"epoch": 0.28,
"grad_norm": 310639.5,
"learning_rate": 4.3384717632506876e-05,
"loss": 0.8298,
"step": 30000
},
{
"epoch": 0.28,
"grad_norm": 1322288.0,
"learning_rate": 4.310779625971533e-05,
"loss": 0.8658,
"step": 30500
},
{
"epoch": 0.29,
"grad_norm": 488033.0625,
"learning_rate": 4.283087488692377e-05,
"loss": 0.8556,
"step": 31000
},
{
"epoch": 0.29,
"grad_norm": 404639.3125,
"learning_rate": 4.2553953514132225e-05,
"loss": 0.8547,
"step": 31500
},
{
"epoch": 0.3,
"grad_norm": 324229.59375,
"learning_rate": 4.227703214134067e-05,
"loss": 0.8853,
"step": 32000
},
{
"epoch": 0.3,
"grad_norm": 391828.6875,
"learning_rate": 4.2000110768549116e-05,
"loss": 0.8725,
"step": 32500
},
{
"epoch": 0.3,
"grad_norm": 481346.0,
"learning_rate": 4.172318939575757e-05,
"loss": 0.8484,
"step": 33000
},
{
"epoch": 0.31,
"grad_norm": 500705.90625,
"learning_rate": 4.144626802296601e-05,
"loss": 0.8415,
"step": 33500
},
{
"epoch": 0.31,
"grad_norm": 484916.0625,
"learning_rate": 4.116934665017446e-05,
"loss": 0.8464,
"step": 34000
},
{
"epoch": 0.32,
"grad_norm": 305320.15625,
"learning_rate": 4.089242527738291e-05,
"loss": 0.8439,
"step": 34500
},
{
"epoch": 0.32,
"grad_norm": 275214.6875,
"learning_rate": 4.0615503904591356e-05,
"loss": 0.8257,
"step": 35000
},
{
"epoch": 0.33,
"grad_norm": 364166.0,
"learning_rate": 4.033858253179981e-05,
"loss": 0.8763,
"step": 35500
},
{
"epoch": 0.33,
"grad_norm": 285496.46875,
"learning_rate": 4.006166115900825e-05,
"loss": 0.856,
"step": 36000
},
{
"epoch": 0.34,
"grad_norm": 959005.0625,
"learning_rate": 3.97847397862167e-05,
"loss": 0.8306,
"step": 36500
},
{
"epoch": 0.34,
"grad_norm": 357090.28125,
"learning_rate": 3.950781841342515e-05,
"loss": 0.8564,
"step": 37000
},
{
"epoch": 0.35,
"grad_norm": 605763.8125,
"learning_rate": 3.9230897040633596e-05,
"loss": 0.8476,
"step": 37500
},
{
"epoch": 0.35,
"grad_norm": 418555.625,
"learning_rate": 3.895397566784205e-05,
"loss": 0.857,
"step": 38000
},
{
"epoch": 0.36,
"grad_norm": 726787.125,
"learning_rate": 3.8677054295050493e-05,
"loss": 0.8627,
"step": 38500
},
{
"epoch": 0.36,
"grad_norm": 212226.21875,
"learning_rate": 3.840013292225894e-05,
"loss": 0.8172,
"step": 39000
},
{
"epoch": 0.36,
"grad_norm": 219878.765625,
"learning_rate": 3.812321154946739e-05,
"loss": 0.8476,
"step": 39500
},
{
"epoch": 0.37,
"grad_norm": 520633.34375,
"learning_rate": 3.7846290176675836e-05,
"loss": 0.8226,
"step": 40000
},
{
"epoch": 0.37,
"grad_norm": 317160.71875,
"learning_rate": 3.756936880388429e-05,
"loss": 0.8235,
"step": 40500
},
{
"epoch": 0.38,
"grad_norm": 409949.8125,
"learning_rate": 3.7292447431092734e-05,
"loss": 0.8112,
"step": 41000
},
{
"epoch": 0.38,
"grad_norm": 287649.84375,
"learning_rate": 3.701552605830118e-05,
"loss": 0.8357,
"step": 41500
},
{
"epoch": 0.39,
"grad_norm": 386562.09375,
"learning_rate": 3.673860468550963e-05,
"loss": 0.8425,
"step": 42000
},
{
"epoch": 0.39,
"grad_norm": 454850.125,
"learning_rate": 3.6461683312718076e-05,
"loss": 0.8347,
"step": 42500
},
{
"epoch": 0.4,
"grad_norm": 239857.5,
"learning_rate": 3.618476193992653e-05,
"loss": 0.8619,
"step": 43000
},
{
"epoch": 0.4,
"grad_norm": 227077.171875,
"learning_rate": 3.5907840567134974e-05,
"loss": 0.847,
"step": 43500
},
{
"epoch": 0.41,
"grad_norm": 465131.4375,
"learning_rate": 3.563091919434342e-05,
"loss": 0.8333,
"step": 44000
},
{
"epoch": 0.41,
"grad_norm": 329121.21875,
"learning_rate": 3.535399782155187e-05,
"loss": 0.8466,
"step": 44500
},
{
"epoch": 0.42,
"grad_norm": 362261.40625,
"learning_rate": 3.5077076448760316e-05,
"loss": 0.8076,
"step": 45000
},
{
"epoch": 0.42,
"grad_norm": 729279.75,
"learning_rate": 3.480015507596876e-05,
"loss": 0.8015,
"step": 45500
},
{
"epoch": 0.42,
"grad_norm": 311664.15625,
"learning_rate": 3.4523233703177214e-05,
"loss": 0.838,
"step": 46000
},
{
"epoch": 0.43,
"grad_norm": 267956.34375,
"learning_rate": 3.424631233038566e-05,
"loss": 0.8229,
"step": 46500
},
{
"epoch": 0.43,
"grad_norm": 393154.1875,
"learning_rate": 3.396939095759411e-05,
"loss": 0.7973,
"step": 47000
},
{
"epoch": 0.44,
"grad_norm": 298007.96875,
"learning_rate": 3.3692469584802556e-05,
"loss": 0.8309,
"step": 47500
},
{
"epoch": 0.44,
"grad_norm": 235954.09375,
"learning_rate": 3.3415548212011e-05,
"loss": 0.808,
"step": 48000
},
{
"epoch": 0.45,
"grad_norm": 565394.3125,
"learning_rate": 3.3138626839219454e-05,
"loss": 0.8123,
"step": 48500
},
{
"epoch": 0.45,
"grad_norm": 228396.78125,
"learning_rate": 3.28617054664279e-05,
"loss": 0.8456,
"step": 49000
},
{
"epoch": 0.46,
"grad_norm": 314704.09375,
"learning_rate": 3.258478409363635e-05,
"loss": 0.8116,
"step": 49500
},
{
"epoch": 0.46,
"grad_norm": 250648.609375,
"learning_rate": 3.2307862720844797e-05,
"loss": 0.8284,
"step": 50000
},
{
"epoch": 0.47,
"grad_norm": 409734.96875,
"learning_rate": 3.203094134805324e-05,
"loss": 0.8015,
"step": 50500
},
{
"epoch": 0.47,
"grad_norm": 171156.265625,
"learning_rate": 3.1754019975261694e-05,
"loss": 0.8322,
"step": 51000
},
{
"epoch": 0.48,
"grad_norm": 300950.65625,
"learning_rate": 3.147709860247014e-05,
"loss": 0.8215,
"step": 51500
},
{
"epoch": 0.48,
"grad_norm": 223661.734375,
"learning_rate": 3.120017722967859e-05,
"loss": 0.8004,
"step": 52000
},
{
"epoch": 0.48,
"grad_norm": 303242.25,
"learning_rate": 3.0923255856887037e-05,
"loss": 0.8109,
"step": 52500
},
{
"epoch": 0.49,
"grad_norm": 190445.625,
"learning_rate": 3.064633448409548e-05,
"loss": 0.8126,
"step": 53000
},
{
"epoch": 0.49,
"grad_norm": 312735.21875,
"learning_rate": 3.036941311130393e-05,
"loss": 0.8186,
"step": 53500
},
{
"epoch": 0.5,
"grad_norm": 492028.34375,
"learning_rate": 3.009249173851238e-05,
"loss": 0.7993,
"step": 54000
},
{
"epoch": 0.5,
"grad_norm": 159610.984375,
"learning_rate": 2.9815570365720828e-05,
"loss": 0.8209,
"step": 54500
},
{
"epoch": 0.51,
"grad_norm": 361054.71875,
"learning_rate": 2.9538648992929277e-05,
"loss": 0.8034,
"step": 55000
},
{
"epoch": 0.51,
"grad_norm": 333324.875,
"learning_rate": 2.9261727620137725e-05,
"loss": 0.7747,
"step": 55500
},
{
"epoch": 0.52,
"grad_norm": 220997.15625,
"learning_rate": 2.898480624734617e-05,
"loss": 0.7898,
"step": 56000
},
{
"epoch": 0.52,
"grad_norm": 427477.28125,
"learning_rate": 2.870788487455462e-05,
"loss": 0.8487,
"step": 56500
},
{
"epoch": 0.53,
"grad_norm": 494515.15625,
"learning_rate": 2.8430963501763068e-05,
"loss": 0.7867,
"step": 57000
},
{
"epoch": 0.53,
"grad_norm": 131962.8125,
"learning_rate": 2.8154042128971517e-05,
"loss": 0.8257,
"step": 57500
},
{
"epoch": 0.54,
"grad_norm": 360923.75,
"learning_rate": 2.7877120756179962e-05,
"loss": 0.824,
"step": 58000
},
{
"epoch": 0.54,
"grad_norm": 537102.3125,
"learning_rate": 2.760019938338841e-05,
"loss": 0.7947,
"step": 58500
},
{
"epoch": 0.54,
"grad_norm": 185466.890625,
"learning_rate": 2.732327801059686e-05,
"loss": 0.7965,
"step": 59000
},
{
"epoch": 0.55,
"grad_norm": 449036.0,
"learning_rate": 2.7046356637805308e-05,
"loss": 0.8359,
"step": 59500
},
{
"epoch": 0.55,
"grad_norm": 302378.875,
"learning_rate": 2.6769435265013757e-05,
"loss": 0.791,
"step": 60000
},
{
"epoch": 0.56,
"grad_norm": 170428.640625,
"learning_rate": 2.6492513892222202e-05,
"loss": 0.804,
"step": 60500
},
{
"epoch": 0.56,
"grad_norm": 486227.0,
"learning_rate": 2.621559251943065e-05,
"loss": 0.7807,
"step": 61000
},
{
"epoch": 0.57,
"grad_norm": 657373.0,
"learning_rate": 2.59386711466391e-05,
"loss": 0.8004,
"step": 61500
},
{
"epoch": 0.57,
"grad_norm": 386137.0625,
"learning_rate": 2.5661749773847548e-05,
"loss": 0.7669,
"step": 62000
},
{
"epoch": 0.58,
"grad_norm": 451425.8125,
"learning_rate": 2.5384828401055994e-05,
"loss": 0.7948,
"step": 62500
},
{
"epoch": 0.58,
"grad_norm": 188410.265625,
"learning_rate": 2.5107907028264442e-05,
"loss": 0.7988,
"step": 63000
},
{
"epoch": 0.59,
"grad_norm": 543696.4375,
"learning_rate": 2.483098565547289e-05,
"loss": 0.7927,
"step": 63500
},
{
"epoch": 0.59,
"grad_norm": 317845.90625,
"learning_rate": 2.455406428268134e-05,
"loss": 0.8021,
"step": 64000
},
{
"epoch": 0.6,
"grad_norm": 567647.5,
"learning_rate": 2.427714290988979e-05,
"loss": 0.7853,
"step": 64500
},
{
"epoch": 0.6,
"grad_norm": 413894.4375,
"learning_rate": 2.4000221537098234e-05,
"loss": 0.7973,
"step": 65000
},
{
"epoch": 0.6,
"grad_norm": 519043.9375,
"learning_rate": 2.3723300164306682e-05,
"loss": 0.7516,
"step": 65500
},
{
"epoch": 0.61,
"grad_norm": 129269.5234375,
"learning_rate": 2.344637879151513e-05,
"loss": 0.8054,
"step": 66000
},
{
"epoch": 0.61,
"grad_norm": 272380.0625,
"learning_rate": 2.316945741872358e-05,
"loss": 0.8112,
"step": 66500
},
{
"epoch": 0.62,
"grad_norm": 299374.875,
"learning_rate": 2.2892536045932025e-05,
"loss": 0.7687,
"step": 67000
},
{
"epoch": 0.62,
"grad_norm": 597694.5,
"learning_rate": 2.2615614673140474e-05,
"loss": 0.8072,
"step": 67500
},
{
"epoch": 0.63,
"grad_norm": 273690.375,
"learning_rate": 2.2338693300348922e-05,
"loss": 0.7895,
"step": 68000
},
{
"epoch": 0.63,
"grad_norm": 237268.21875,
"learning_rate": 2.206177192755737e-05,
"loss": 0.7802,
"step": 68500
},
{
"epoch": 0.64,
"grad_norm": 347646.09375,
"learning_rate": 2.178485055476582e-05,
"loss": 0.7991,
"step": 69000
},
{
"epoch": 0.64,
"grad_norm": 266270.28125,
"learning_rate": 2.1507929181974265e-05,
"loss": 0.7979,
"step": 69500
},
{
"epoch": 0.65,
"grad_norm": 371650.84375,
"learning_rate": 2.1231007809182714e-05,
"loss": 0.7616,
"step": 70000
},
{
"epoch": 0.65,
"grad_norm": 389250.75,
"learning_rate": 2.0954086436391163e-05,
"loss": 0.8056,
"step": 70500
},
{
"epoch": 0.66,
"grad_norm": 472425.71875,
"learning_rate": 2.067716506359961e-05,
"loss": 0.7632,
"step": 71000
},
{
"epoch": 0.66,
"grad_norm": 341248.3125,
"learning_rate": 2.0400243690808057e-05,
"loss": 0.7648,
"step": 71500
},
{
"epoch": 0.66,
"grad_norm": 470803.96875,
"learning_rate": 2.0123322318016505e-05,
"loss": 0.7699,
"step": 72000
},
{
"epoch": 0.67,
"grad_norm": 363643.71875,
"learning_rate": 1.9846400945224954e-05,
"loss": 0.7929,
"step": 72500
},
{
"epoch": 0.67,
"grad_norm": 380603.65625,
"learning_rate": 1.9569479572433403e-05,
"loss": 0.7614,
"step": 73000
},
{
"epoch": 0.68,
"grad_norm": 282191.78125,
"learning_rate": 1.929255819964185e-05,
"loss": 0.7508,
"step": 73500
},
{
"epoch": 0.68,
"grad_norm": 270537.5625,
"learning_rate": 1.9015636826850297e-05,
"loss": 0.7724,
"step": 74000
},
{
"epoch": 0.69,
"grad_norm": 786935.9375,
"learning_rate": 1.8738715454058745e-05,
"loss": 0.7903,
"step": 74500
},
{
"epoch": 0.69,
"grad_norm": 276354.875,
"learning_rate": 1.8461794081267194e-05,
"loss": 0.8034,
"step": 75000
},
{
"epoch": 0.7,
"grad_norm": 488051.25,
"learning_rate": 1.8184872708475643e-05,
"loss": 0.7988,
"step": 75500
},
{
"epoch": 0.7,
"grad_norm": 268592.53125,
"learning_rate": 1.7907951335684088e-05,
"loss": 0.771,
"step": 76000
},
{
"epoch": 0.71,
"grad_norm": 286408.90625,
"learning_rate": 1.7631029962892537e-05,
"loss": 0.7732,
"step": 76500
},
{
"epoch": 0.71,
"grad_norm": 287447.375,
"learning_rate": 1.7354108590100985e-05,
"loss": 0.7788,
"step": 77000
},
{
"epoch": 0.72,
"grad_norm": 427488.15625,
"learning_rate": 1.7077187217309434e-05,
"loss": 0.7763,
"step": 77500
},
{
"epoch": 0.72,
"grad_norm": 327204.96875,
"learning_rate": 1.6800265844517883e-05,
"loss": 0.7814,
"step": 78000
},
{
"epoch": 0.72,
"grad_norm": 446818.0,
"learning_rate": 1.6523344471726328e-05,
"loss": 0.773,
"step": 78500
},
{
"epoch": 0.73,
"grad_norm": 367178.46875,
"learning_rate": 1.6246423098934777e-05,
"loss": 0.7784,
"step": 79000
},
{
"epoch": 0.73,
"grad_norm": 564747.75,
"learning_rate": 1.5969501726143226e-05,
"loss": 0.7751,
"step": 79500
},
{
"epoch": 0.74,
"grad_norm": 373075.34375,
"learning_rate": 1.5692580353351674e-05,
"loss": 0.7701,
"step": 80000
},
{
"epoch": 0.74,
"grad_norm": 220253.890625,
"learning_rate": 1.541565898056012e-05,
"loss": 0.7717,
"step": 80500
},
{
"epoch": 0.75,
"grad_norm": 442242.46875,
"learning_rate": 1.513873760776857e-05,
"loss": 0.7699,
"step": 81000
},
{
"epoch": 0.75,
"grad_norm": 281603.1875,
"learning_rate": 1.4861816234977015e-05,
"loss": 0.7626,
"step": 81500
},
{
"epoch": 0.76,
"grad_norm": 420246.21875,
"learning_rate": 1.4584894862185464e-05,
"loss": 0.7719,
"step": 82000
},
{
"epoch": 0.76,
"grad_norm": 347264.375,
"learning_rate": 1.4307973489393911e-05,
"loss": 0.764,
"step": 82500
},
{
"epoch": 0.77,
"grad_norm": 326494.21875,
"learning_rate": 1.403105211660236e-05,
"loss": 0.7617,
"step": 83000
},
{
"epoch": 0.77,
"grad_norm": 193308.671875,
"learning_rate": 1.3754130743810807e-05,
"loss": 0.7599,
"step": 83500
},
{
"epoch": 0.78,
"grad_norm": 283088.8125,
"learning_rate": 1.3477209371019255e-05,
"loss": 0.7628,
"step": 84000
},
{
"epoch": 0.78,
"grad_norm": 539284.0,
"learning_rate": 1.3200287998227702e-05,
"loss": 0.7358,
"step": 84500
},
{
"epoch": 0.78,
"grad_norm": 473964.84375,
"learning_rate": 1.2923366625436151e-05,
"loss": 0.755,
"step": 85000
},
{
"epoch": 0.79,
"grad_norm": 327867.3125,
"learning_rate": 1.26464452526446e-05,
"loss": 0.7718,
"step": 85500
},
{
"epoch": 0.79,
"grad_norm": 263024.28125,
"learning_rate": 1.2369523879853047e-05,
"loss": 0.7722,
"step": 86000
},
{
"epoch": 0.8,
"grad_norm": 266528.5625,
"learning_rate": 1.2092602507061495e-05,
"loss": 0.7635,
"step": 86500
},
{
"epoch": 0.8,
"grad_norm": 126082.46875,
"learning_rate": 1.1815681134269942e-05,
"loss": 0.7508,
"step": 87000
},
{
"epoch": 0.81,
"grad_norm": 300010.3125,
"learning_rate": 1.1538759761478391e-05,
"loss": 0.7755,
"step": 87500
},
{
"epoch": 0.81,
"grad_norm": 471967.5625,
"learning_rate": 1.1261838388686838e-05,
"loss": 0.7416,
"step": 88000
},
{
"epoch": 0.82,
"grad_norm": 261213.0625,
"learning_rate": 1.0984917015895287e-05,
"loss": 0.7705,
"step": 88500
},
{
"epoch": 0.82,
"grad_norm": 192903.46875,
"learning_rate": 1.0707995643103734e-05,
"loss": 0.7392,
"step": 89000
},
{
"epoch": 0.83,
"grad_norm": 682632.8125,
"learning_rate": 1.0431074270312182e-05,
"loss": 0.7577,
"step": 89500
},
{
"epoch": 0.83,
"grad_norm": 218102.4375,
"learning_rate": 1.0154152897520631e-05,
"loss": 0.7586,
"step": 90000
},
{
"epoch": 0.84,
"grad_norm": 212267.5625,
"learning_rate": 9.877231524729078e-06,
"loss": 0.7472,
"step": 90500
},
{
"epoch": 0.84,
"grad_norm": 320534.59375,
"learning_rate": 9.600310151937527e-06,
"loss": 0.7602,
"step": 91000
},
{
"epoch": 0.84,
"grad_norm": 295994.96875,
"learning_rate": 9.323388779145974e-06,
"loss": 0.7368,
"step": 91500
},
{
"epoch": 0.85,
"grad_norm": 221153.6875,
"learning_rate": 9.046467406354423e-06,
"loss": 0.7492,
"step": 92000
},
{
"epoch": 0.85,
"grad_norm": 250322.6875,
"learning_rate": 8.76954603356287e-06,
"loss": 0.7582,
"step": 92500
},
{
"epoch": 0.86,
"grad_norm": 364772.46875,
"learning_rate": 8.492624660771318e-06,
"loss": 0.747,
"step": 93000
},
{
"epoch": 0.86,
"grad_norm": 226077.453125,
"learning_rate": 8.215703287979767e-06,
"loss": 0.7515,
"step": 93500
},
{
"epoch": 0.87,
"grad_norm": 260477.265625,
"learning_rate": 7.938781915188214e-06,
"loss": 0.7402,
"step": 94000
},
{
"epoch": 0.87,
"grad_norm": 380977.46875,
"learning_rate": 7.661860542396663e-06,
"loss": 0.7512,
"step": 94500
},
{
"epoch": 0.88,
"grad_norm": 1052145.875,
"learning_rate": 7.38493916960511e-06,
"loss": 0.7589,
"step": 95000
},
{
"epoch": 0.88,
"grad_norm": 213601.171875,
"learning_rate": 7.1080177968135575e-06,
"loss": 0.7581,
"step": 95500
},
{
"epoch": 0.89,
"grad_norm": 467877.71875,
"learning_rate": 6.831096424022006e-06,
"loss": 0.7464,
"step": 96000
},
{
"epoch": 0.89,
"grad_norm": 566204.3125,
"learning_rate": 6.554175051230454e-06,
"loss": 0.7557,
"step": 96500
},
{
"epoch": 0.9,
"grad_norm": 330177.5625,
"learning_rate": 6.277253678438902e-06,
"loss": 0.7304,
"step": 97000
},
{
"epoch": 0.9,
"grad_norm": 319206.65625,
"learning_rate": 6.00033230564735e-06,
"loss": 0.736,
"step": 97500
},
{
"epoch": 0.9,
"grad_norm": 410904.90625,
"learning_rate": 5.723410932855798e-06,
"loss": 0.752,
"step": 98000
},
{
"epoch": 0.91,
"grad_norm": 375357.15625,
"learning_rate": 5.4464895600642454e-06,
"loss": 0.7358,
"step": 98500
},
{
"epoch": 0.91,
"grad_norm": 307483.375,
"learning_rate": 5.169568187272693e-06,
"loss": 0.7749,
"step": 99000
},
{
"epoch": 0.92,
"grad_norm": 434196.59375,
"learning_rate": 4.892646814481141e-06,
"loss": 0.7324,
"step": 99500
},
{
"epoch": 0.92,
"grad_norm": 554119.4375,
"learning_rate": 4.61572544168959e-06,
"loss": 0.7096,
"step": 100000
},
{
"epoch": 0.93,
"grad_norm": 224006.78125,
"learning_rate": 4.338804068898038e-06,
"loss": 0.7205,
"step": 100500
},
{
"epoch": 0.93,
"grad_norm": 215164.78125,
"learning_rate": 4.0618826961064855e-06,
"loss": 0.732,
"step": 101000
},
{
"epoch": 0.94,
"grad_norm": 391820.53125,
"learning_rate": 3.7849613233149334e-06,
"loss": 0.7228,
"step": 101500
},
{
"epoch": 0.94,
"grad_norm": 306694.15625,
"learning_rate": 3.5080399505233816e-06,
"loss": 0.7507,
"step": 102000
},
{
"epoch": 0.95,
"grad_norm": 467995.03125,
"learning_rate": 3.2311185777318295e-06,
"loss": 0.7433,
"step": 102500
},
{
"epoch": 0.95,
"grad_norm": 284427.625,
"learning_rate": 2.9541972049402773e-06,
"loss": 0.7047,
"step": 103000
},
{
"epoch": 0.96,
"grad_norm": 193486.921875,
"learning_rate": 2.6772758321487256e-06,
"loss": 0.7646,
"step": 103500
},
{
"epoch": 0.96,
"grad_norm": 262856.15625,
"learning_rate": 2.4003544593571735e-06,
"loss": 0.7306,
"step": 104000
},
{
"epoch": 0.96,
"grad_norm": 267263.5,
"learning_rate": 2.1234330865656213e-06,
"loss": 0.7631,
"step": 104500
},
{
"epoch": 0.97,
"grad_norm": 229738.515625,
"learning_rate": 1.8465117137740692e-06,
"loss": 0.731,
"step": 105000
},
{
"epoch": 0.97,
"grad_norm": 528997.6875,
"learning_rate": 1.569590340982517e-06,
"loss": 0.7393,
"step": 105500
},
{
"epoch": 0.98,
"grad_norm": 346399.28125,
"learning_rate": 1.292668968190965e-06,
"loss": 0.7304,
"step": 106000
},
{
"epoch": 0.98,
"grad_norm": 315753.28125,
"learning_rate": 1.015747595399413e-06,
"loss": 0.7239,
"step": 106500
},
{
"epoch": 0.99,
"grad_norm": 245733.109375,
"learning_rate": 7.388262226078609e-07,
"loss": 0.7067,
"step": 107000
},
{
"epoch": 0.99,
"grad_norm": 423298.15625,
"learning_rate": 4.619048498163089e-07,
"loss": 0.734,
"step": 107500
},
{
"epoch": 1.0,
"grad_norm": 361011.8125,
"learning_rate": 1.8498347702475678e-07,
"loss": 0.7356,
"step": 108000
},
{
"epoch": 1.0,
"eval_accuracy": 0.68,
"eval_loss": 0.750106155872345,
"eval_runtime": 570.5256,
"eval_samples_per_second": 17.528,
"eval_steps_per_second": 2.922,
"step": 108334
}
],
"logging_steps": 500,
"max_steps": 108334,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 2.328341808688051e+18,
"train_batch_size": 6,
"trial_name": null,
"trial_params": null
}