{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 80.77544426494346,
"eval_steps": 500,
"global_step": 100000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.81,
"learning_rate": 4.000000000000001e-06,
"loss": 7.87,
"step": 1000
},
{
"epoch": 1.62,
"learning_rate": 8.000000000000001e-06,
"loss": 6.8831,
"step": 2000
},
{
"epoch": 2.42,
"learning_rate": 1.2e-05,
"loss": 6.7256,
"step": 3000
},
{
"epoch": 3.23,
"learning_rate": 1.6000000000000003e-05,
"loss": 6.6605,
"step": 4000
},
{
"epoch": 4.04,
"learning_rate": 2e-05,
"loss": 6.6243,
"step": 5000
},
{
"epoch": 4.85,
"learning_rate": 2.4e-05,
"loss": 6.6033,
"step": 6000
},
{
"epoch": 5.65,
"learning_rate": 2.8e-05,
"loss": 6.5906,
"step": 7000
},
{
"epoch": 6.46,
"learning_rate": 3.2000000000000005e-05,
"loss": 6.5772,
"step": 8000
},
{
"epoch": 7.27,
"learning_rate": 3.6e-05,
"loss": 6.5718,
"step": 9000
},
{
"epoch": 8.08,
"learning_rate": 4e-05,
"loss": 6.5653,
"step": 10000
},
{
"epoch": 8.89,
"learning_rate": 3.955555555555556e-05,
"loss": 6.5602,
"step": 11000
},
{
"epoch": 9.69,
"learning_rate": 3.9111111111111115e-05,
"loss": 6.5572,
"step": 12000
},
{
"epoch": 10.5,
"learning_rate": 3.866666666666667e-05,
"loss": 6.5534,
"step": 13000
},
{
"epoch": 11.31,
"learning_rate": 3.8222222222222226e-05,
"loss": 6.5533,
"step": 14000
},
{
"epoch": 12.12,
"learning_rate": 3.777777777777778e-05,
"loss": 6.547,
"step": 15000
},
{
"epoch": 12.92,
"learning_rate": 3.733333333333334e-05,
"loss": 6.5463,
"step": 16000
},
{
"epoch": 13.73,
"learning_rate": 3.6888888888888896e-05,
"loss": 6.5437,
"step": 17000
},
{
"epoch": 14.54,
"learning_rate": 3.644444444444445e-05,
"loss": 6.5322,
"step": 18000
},
{
"epoch": 15.35,
"learning_rate": 3.6e-05,
"loss": 6.4474,
"step": 19000
},
{
"epoch": 16.16,
"learning_rate": 3.555555555555555e-05,
"loss": 6.3968,
"step": 20000
},
{
"epoch": 16.96,
"learning_rate": 3.511111111111111e-05,
"loss": 6.3516,
"step": 21000
},
{
"epoch": 17.77,
"learning_rate": 3.466666666666667e-05,
"loss": 6.311,
"step": 22000
},
{
"epoch": 18.58,
"learning_rate": 3.4222222222222224e-05,
"loss": 6.2707,
"step": 23000
},
{
"epoch": 19.39,
"learning_rate": 3.377777777777778e-05,
"loss": 6.2294,
"step": 24000
},
{
"epoch": 20.19,
"learning_rate": 3.3333333333333335e-05,
"loss": 6.1822,
"step": 25000
},
{
"epoch": 21.0,
"learning_rate": 3.288888888888889e-05,
"loss": 6.1128,
"step": 26000
},
{
"epoch": 21.81,
"learning_rate": 3.2444444444444446e-05,
"loss": 6.0262,
"step": 27000
},
{
"epoch": 22.62,
"learning_rate": 3.2000000000000005e-05,
"loss": 5.9326,
"step": 28000
},
{
"epoch": 23.42,
"learning_rate": 3.155555555555556e-05,
"loss": 5.8051,
"step": 29000
},
{
"epoch": 24.23,
"learning_rate": 3.111111111111112e-05,
"loss": 5.703,
"step": 30000
},
{
"epoch": 25.04,
"learning_rate": 3.066666666666667e-05,
"loss": 5.6361,
"step": 31000
},
{
"epoch": 25.85,
"learning_rate": 3.0222222222222225e-05,
"loss": 5.5885,
"step": 32000
},
{
"epoch": 26.66,
"learning_rate": 2.977777777777778e-05,
"loss": 5.5487,
"step": 33000
},
{
"epoch": 27.46,
"learning_rate": 2.9333333333333333e-05,
"loss": 5.5163,
"step": 34000
},
{
"epoch": 28.27,
"learning_rate": 2.888888888888889e-05,
"loss": 5.4922,
"step": 35000
},
{
"epoch": 29.08,
"learning_rate": 2.8444444444444447e-05,
"loss": 5.4659,
"step": 36000
},
{
"epoch": 29.89,
"learning_rate": 2.8e-05,
"loss": 5.4453,
"step": 37000
},
{
"epoch": 30.69,
"learning_rate": 2.755555555555556e-05,
"loss": 5.4257,
"step": 38000
},
{
"epoch": 31.5,
"learning_rate": 2.7111111111111114e-05,
"loss": 5.4111,
"step": 39000
},
{
"epoch": 32.31,
"learning_rate": 2.6666666666666667e-05,
"loss": 5.3939,
"step": 40000
},
{
"epoch": 33.12,
"learning_rate": 2.6222222222222226e-05,
"loss": 5.3814,
"step": 41000
},
{
"epoch": 33.93,
"learning_rate": 2.577777777777778e-05,
"loss": 5.3658,
"step": 42000
},
{
"epoch": 34.73,
"learning_rate": 2.5333333333333334e-05,
"loss": 5.353,
"step": 43000
},
{
"epoch": 35.54,
"learning_rate": 2.4888888888888893e-05,
"loss": 5.3419,
"step": 44000
},
{
"epoch": 36.35,
"learning_rate": 2.444444444444445e-05,
"loss": 5.3312,
"step": 45000
},
{
"epoch": 37.16,
"learning_rate": 2.4e-05,
"loss": 5.3217,
"step": 46000
},
{
"epoch": 37.96,
"learning_rate": 2.355555555555556e-05,
"loss": 5.3111,
"step": 47000
},
{
"epoch": 38.77,
"learning_rate": 2.3111111111111112e-05,
"loss": 5.3013,
"step": 48000
},
{
"epoch": 39.58,
"learning_rate": 2.2666666666666668e-05,
"loss": 5.291,
"step": 49000
},
{
"epoch": 40.39,
"learning_rate": 2.2222222222222227e-05,
"loss": 5.2835,
"step": 50000
},
{
"epoch": 41.2,
"learning_rate": 2.177777777777778e-05,
"loss": 5.2753,
"step": 51000
},
{
"epoch": 42.0,
"learning_rate": 2.1333333333333335e-05,
"loss": 5.2666,
"step": 52000
},
{
"epoch": 42.81,
"learning_rate": 2.088888888888889e-05,
"loss": 5.2594,
"step": 53000
},
{
"epoch": 43.62,
"learning_rate": 2.0444444444444446e-05,
"loss": 5.2513,
"step": 54000
},
{
"epoch": 44.43,
"learning_rate": 2e-05,
"loss": 5.2441,
"step": 55000
},
{
"epoch": 45.23,
"learning_rate": 1.9555555555555557e-05,
"loss": 5.2426,
"step": 56000
},
{
"epoch": 46.04,
"learning_rate": 1.9111111111111113e-05,
"loss": 5.2328,
"step": 57000
},
{
"epoch": 46.85,
"learning_rate": 1.866666666666667e-05,
"loss": 5.2278,
"step": 58000
},
{
"epoch": 47.66,
"learning_rate": 1.8222222222222224e-05,
"loss": 5.2213,
"step": 59000
},
{
"epoch": 48.47,
"learning_rate": 1.7777777777777777e-05,
"loss": 5.2155,
"step": 60000
},
{
"epoch": 49.27,
"learning_rate": 1.7333333333333336e-05,
"loss": 5.2102,
"step": 61000
},
{
"epoch": 50.08,
"learning_rate": 1.688888888888889e-05,
"loss": 5.2048,
"step": 62000
},
{
"epoch": 50.89,
"learning_rate": 1.6444444444444444e-05,
"loss": 5.2008,
"step": 63000
},
{
"epoch": 51.7,
"learning_rate": 1.6000000000000003e-05,
"loss": 5.1975,
"step": 64000
},
{
"epoch": 52.5,
"learning_rate": 1.555555555555556e-05,
"loss": 5.1925,
"step": 65000
},
{
"epoch": 53.31,
"learning_rate": 1.5111111111111112e-05,
"loss": 5.1883,
"step": 66000
},
{
"epoch": 54.12,
"learning_rate": 1.4666666666666666e-05,
"loss": 5.1822,
"step": 67000
},
{
"epoch": 54.93,
"learning_rate": 1.4222222222222224e-05,
"loss": 5.1806,
"step": 68000
},
{
"epoch": 55.74,
"learning_rate": 1.377777777777778e-05,
"loss": 5.1756,
"step": 69000
},
{
"epoch": 56.54,
"learning_rate": 1.3333333333333333e-05,
"loss": 5.175,
"step": 70000
},
{
"epoch": 57.35,
"learning_rate": 1.288888888888889e-05,
"loss": 5.1686,
"step": 71000
},
{
"epoch": 58.16,
"learning_rate": 1.2444444444444446e-05,
"loss": 5.1672,
"step": 72000
},
{
"epoch": 58.97,
"learning_rate": 1.2e-05,
"loss": 5.1644,
"step": 73000
},
{
"epoch": 59.77,
"learning_rate": 1.1555555555555556e-05,
"loss": 5.1609,
"step": 74000
},
{
"epoch": 60.58,
"learning_rate": 1.1111111111111113e-05,
"loss": 5.1582,
"step": 75000
},
{
"epoch": 61.39,
"learning_rate": 1.0666666666666667e-05,
"loss": 5.1539,
"step": 76000
},
{
"epoch": 62.2,
"learning_rate": 1.0222222222222223e-05,
"loss": 5.1535,
"step": 77000
},
{
"epoch": 63.0,
"learning_rate": 9.777777777777779e-06,
"loss": 5.1516,
"step": 78000
},
{
"epoch": 63.81,
"learning_rate": 9.333333333333334e-06,
"loss": 5.1479,
"step": 79000
},
{
"epoch": 64.62,
"learning_rate": 8.888888888888888e-06,
"loss": 5.1451,
"step": 80000
},
{
"epoch": 65.43,
"learning_rate": 8.444444444444446e-06,
"loss": 5.1439,
"step": 81000
},
{
"epoch": 66.24,
"learning_rate": 8.000000000000001e-06,
"loss": 5.1429,
"step": 82000
},
{
"epoch": 67.04,
"learning_rate": 7.555555555555556e-06,
"loss": 5.1402,
"step": 83000
},
{
"epoch": 67.85,
"learning_rate": 7.111111111111112e-06,
"loss": 5.1375,
"step": 84000
},
{
"epoch": 68.66,
"learning_rate": 6.666666666666667e-06,
"loss": 5.1348,
"step": 85000
},
{
"epoch": 69.47,
"learning_rate": 6.222222222222223e-06,
"loss": 5.1357,
"step": 86000
},
{
"epoch": 70.27,
"learning_rate": 5.777777777777778e-06,
"loss": 5.1336,
"step": 87000
},
{
"epoch": 71.08,
"learning_rate": 5.333333333333334e-06,
"loss": 5.1308,
"step": 88000
},
{
"epoch": 71.89,
"learning_rate": 4.888888888888889e-06,
"loss": 5.1304,
"step": 89000
},
{
"epoch": 72.7,
"learning_rate": 4.444444444444444e-06,
"loss": 5.1285,
"step": 90000
},
{
"epoch": 73.51,
"learning_rate": 4.000000000000001e-06,
"loss": 5.1272,
"step": 91000
},
{
"epoch": 74.31,
"learning_rate": 3.555555555555556e-06,
"loss": 5.1272,
"step": 92000
},
{
"epoch": 75.12,
"learning_rate": 3.1111111111111116e-06,
"loss": 5.1281,
"step": 93000
},
{
"epoch": 75.93,
"learning_rate": 2.666666666666667e-06,
"loss": 5.1257,
"step": 94000
},
{
"epoch": 76.74,
"learning_rate": 2.222222222222222e-06,
"loss": 5.1245,
"step": 95000
},
{
"epoch": 77.54,
"learning_rate": 1.777777777777778e-06,
"loss": 5.1244,
"step": 96000
},
{
"epoch": 78.35,
"learning_rate": 1.3333333333333334e-06,
"loss": 5.1229,
"step": 97000
},
{
"epoch": 79.16,
"learning_rate": 8.88888888888889e-07,
"loss": 5.1223,
"step": 98000
},
{
"epoch": 79.97,
"learning_rate": 4.444444444444445e-07,
"loss": 5.1231,
"step": 99000
},
{
"epoch": 80.78,
"learning_rate": 0.0,
"loss": 5.1203,
"step": 100000
},
{
"epoch": 80.78,
"step": 100000,
"total_flos": 2.0208137237272986e+19,
"train_loss": 5.605590717773437,
"train_runtime": 84499.2622,
"train_samples_per_second": 908.884,
"train_steps_per_second": 1.183
}
],
"logging_steps": 1000,
"max_steps": 100000,
"num_train_epochs": 81,
"save_steps": 1000,
"total_flos": 2.0208137237272986e+19,
"trial_name": null,
"trial_params": null
}