{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 28.320558806871816,
"eval_steps": 500,
"global_step": 100000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.28,
"learning_rate": 4.000000000000001e-06,
"loss": 7.4059,
"step": 1000
},
{
"epoch": 0.57,
"learning_rate": 8.000000000000001e-06,
"loss": 6.7738,
"step": 2000
},
{
"epoch": 0.85,
"learning_rate": 1.2e-05,
"loss": 6.5767,
"step": 3000
},
{
"epoch": 1.13,
"learning_rate": 1.6000000000000003e-05,
"loss": 6.4861,
"step": 4000
},
{
"epoch": 1.42,
"learning_rate": 2e-05,
"loss": 6.4545,
"step": 5000
},
{
"epoch": 1.7,
"learning_rate": 2.4e-05,
"loss": 6.4371,
"step": 6000
},
{
"epoch": 1.98,
"learning_rate": 2.8e-05,
"loss": 6.4229,
"step": 7000
},
{
"epoch": 2.27,
"learning_rate": 3.2000000000000005e-05,
"loss": 6.4124,
"step": 8000
},
{
"epoch": 2.55,
"learning_rate": 3.6e-05,
"loss": 6.4036,
"step": 9000
},
{
"epoch": 2.83,
"learning_rate": 4e-05,
"loss": 6.3949,
"step": 10000
},
{
"epoch": 3.11,
"learning_rate": 3.955555555555556e-05,
"loss": 6.3881,
"step": 11000
},
{
"epoch": 3.4,
"learning_rate": 3.9111111111111115e-05,
"loss": 6.3816,
"step": 12000
},
{
"epoch": 3.68,
"learning_rate": 3.866666666666667e-05,
"loss": 6.3782,
"step": 13000
},
{
"epoch": 3.96,
"learning_rate": 3.8222222222222226e-05,
"loss": 6.3737,
"step": 14000
},
{
"epoch": 4.25,
"learning_rate": 3.777777777777778e-05,
"loss": 6.3709,
"step": 15000
},
{
"epoch": 4.53,
"learning_rate": 3.733333333333334e-05,
"loss": 6.3688,
"step": 16000
},
{
"epoch": 4.81,
"learning_rate": 3.6888888888888896e-05,
"loss": 6.366,
"step": 17000
},
{
"epoch": 5.1,
"learning_rate": 3.644444444444445e-05,
"loss": 6.3647,
"step": 18000
},
{
"epoch": 5.38,
"learning_rate": 3.6e-05,
"loss": 6.3626,
"step": 19000
},
{
"epoch": 5.66,
"learning_rate": 3.555555555555555e-05,
"loss": 6.3604,
"step": 20000
},
{
"epoch": 5.95,
"learning_rate": 3.511111111111111e-05,
"loss": 6.3601,
"step": 21000
},
{
"epoch": 6.23,
"learning_rate": 3.466666666666667e-05,
"loss": 6.3572,
"step": 22000
},
{
"epoch": 6.51,
"learning_rate": 3.4222222222222224e-05,
"loss": 6.3567,
"step": 23000
},
{
"epoch": 6.8,
"learning_rate": 3.377777777777778e-05,
"loss": 6.3569,
"step": 24000
},
{
"epoch": 7.08,
"learning_rate": 3.3333333333333335e-05,
"loss": 6.3555,
"step": 25000
},
{
"epoch": 7.36,
"learning_rate": 3.288888888888889e-05,
"loss": 6.3541,
"step": 26000
},
{
"epoch": 7.65,
"learning_rate": 3.2444444444444446e-05,
"loss": 6.353,
"step": 27000
},
{
"epoch": 7.93,
"learning_rate": 3.2000000000000005e-05,
"loss": 6.352,
"step": 28000
},
{
"epoch": 8.21,
"learning_rate": 3.155555555555556e-05,
"loss": 6.3374,
"step": 29000
},
{
"epoch": 8.5,
"learning_rate": 3.111111111111112e-05,
"loss": 6.274,
"step": 30000
},
{
"epoch": 8.78,
"learning_rate": 3.066666666666667e-05,
"loss": 6.2403,
"step": 31000
},
{
"epoch": 9.06,
"learning_rate": 3.0222222222222225e-05,
"loss": 6.219,
"step": 32000
},
{
"epoch": 9.35,
"learning_rate": 2.977777777777778e-05,
"loss": 6.194,
"step": 33000
},
{
"epoch": 9.63,
"learning_rate": 2.9333333333333333e-05,
"loss": 6.1634,
"step": 34000
},
{
"epoch": 9.91,
"learning_rate": 2.888888888888889e-05,
"loss": 6.136,
"step": 35000
},
{
"epoch": 10.2,
"learning_rate": 2.8444444444444447e-05,
"loss": 6.0914,
"step": 36000
},
{
"epoch": 10.48,
"learning_rate": 2.8e-05,
"loss": 6.0379,
"step": 37000
},
{
"epoch": 10.76,
"learning_rate": 2.755555555555556e-05,
"loss": 5.9794,
"step": 38000
},
{
"epoch": 11.04,
"learning_rate": 2.7111111111111114e-05,
"loss": 5.9123,
"step": 39000
},
{
"epoch": 11.33,
"learning_rate": 2.6666666666666667e-05,
"loss": 5.8473,
"step": 40000
},
{
"epoch": 11.61,
"learning_rate": 2.6222222222222226e-05,
"loss": 5.761,
"step": 41000
},
{
"epoch": 11.89,
"learning_rate": 2.577777777777778e-05,
"loss": 5.6793,
"step": 42000
},
{
"epoch": 12.18,
"learning_rate": 2.5333333333333334e-05,
"loss": 5.6205,
"step": 43000
},
{
"epoch": 12.46,
"learning_rate": 2.4888888888888893e-05,
"loss": 5.5763,
"step": 44000
},
{
"epoch": 12.74,
"learning_rate": 2.444444444444445e-05,
"loss": 5.5355,
"step": 45000
},
{
"epoch": 13.03,
"learning_rate": 2.4e-05,
"loss": 5.5028,
"step": 46000
},
{
"epoch": 13.31,
"learning_rate": 2.355555555555556e-05,
"loss": 5.4741,
"step": 47000
},
{
"epoch": 13.59,
"learning_rate": 2.3111111111111112e-05,
"loss": 5.4451,
"step": 48000
},
{
"epoch": 13.88,
"learning_rate": 2.2666666666666668e-05,
"loss": 5.4244,
"step": 49000
},
{
"epoch": 14.16,
"learning_rate": 2.2222222222222227e-05,
"loss": 5.4003,
"step": 50000
},
{
"epoch": 14.44,
"learning_rate": 2.177777777777778e-05,
"loss": 5.3802,
"step": 51000
},
{
"epoch": 14.73,
"learning_rate": 2.1333333333333335e-05,
"loss": 5.3606,
"step": 52000
},
{
"epoch": 15.01,
"learning_rate": 2.088888888888889e-05,
"loss": 5.3435,
"step": 53000
},
{
"epoch": 15.29,
"learning_rate": 2.0444444444444446e-05,
"loss": 5.3308,
"step": 54000
},
{
"epoch": 15.58,
"learning_rate": 2e-05,
"loss": 5.3103,
"step": 55000
},
{
"epoch": 15.86,
"learning_rate": 1.9555555555555557e-05,
"loss": 5.2965,
"step": 56000
},
{
"epoch": 16.14,
"learning_rate": 1.9111111111111113e-05,
"loss": 5.2886,
"step": 57000
},
{
"epoch": 16.43,
"learning_rate": 1.866666666666667e-05,
"loss": 5.2752,
"step": 58000
},
{
"epoch": 16.71,
"learning_rate": 1.8222222222222224e-05,
"loss": 5.2649,
"step": 59000
},
{
"epoch": 16.99,
"learning_rate": 1.7777777777777777e-05,
"loss": 5.2527,
"step": 60000
},
{
"epoch": 17.27,
"learning_rate": 1.7333333333333336e-05,
"loss": 5.2436,
"step": 61000
},
{
"epoch": 17.56,
"learning_rate": 1.688888888888889e-05,
"loss": 5.2318,
"step": 62000
},
{
"epoch": 17.84,
"learning_rate": 1.6444444444444444e-05,
"loss": 5.2264,
"step": 63000
},
{
"epoch": 18.12,
"learning_rate": 1.6000000000000003e-05,
"loss": 5.218,
"step": 64000
},
{
"epoch": 18.41,
"learning_rate": 1.555555555555556e-05,
"loss": 5.2067,
"step": 65000
},
{
"epoch": 18.69,
"learning_rate": 1.5111111111111112e-05,
"loss": 5.2002,
"step": 66000
},
{
"epoch": 18.97,
"learning_rate": 1.4666666666666666e-05,
"loss": 5.1996,
"step": 67000
},
{
"epoch": 19.26,
"learning_rate": 1.4222222222222224e-05,
"loss": 5.1862,
"step": 68000
},
{
"epoch": 19.54,
"learning_rate": 1.377777777777778e-05,
"loss": 5.1821,
"step": 69000
},
{
"epoch": 19.82,
"learning_rate": 1.3333333333333333e-05,
"loss": 5.1776,
"step": 70000
},
{
"epoch": 20.11,
"learning_rate": 1.288888888888889e-05,
"loss": 5.1709,
"step": 71000
},
{
"epoch": 20.39,
"learning_rate": 1.2444444444444446e-05,
"loss": 5.1644,
"step": 72000
},
{
"epoch": 20.67,
"learning_rate": 1.2e-05,
"loss": 5.1592,
"step": 73000
},
{
"epoch": 20.96,
"learning_rate": 1.1555555555555556e-05,
"loss": 5.158,
"step": 74000
},
{
"epoch": 21.24,
"learning_rate": 1.1111111111111113e-05,
"loss": 5.1505,
"step": 75000
},
{
"epoch": 21.52,
"learning_rate": 1.0666666666666667e-05,
"loss": 5.1463,
"step": 76000
},
{
"epoch": 21.81,
"learning_rate": 1.0222222222222223e-05,
"loss": 5.1434,
"step": 77000
},
{
"epoch": 22.09,
"learning_rate": 9.777777777777779e-06,
"loss": 5.1396,
"step": 78000
},
{
"epoch": 22.37,
"learning_rate": 9.333333333333334e-06,
"loss": 5.1338,
"step": 79000
},
{
"epoch": 22.66,
"learning_rate": 8.888888888888888e-06,
"loss": 5.1323,
"step": 80000
},
{
"epoch": 22.94,
"learning_rate": 8.444444444444446e-06,
"loss": 5.1303,
"step": 81000
},
{
"epoch": 23.22,
"learning_rate": 8.000000000000001e-06,
"loss": 5.1224,
"step": 82000
},
{
"epoch": 23.51,
"learning_rate": 7.555555555555556e-06,
"loss": 5.1214,
"step": 83000
},
{
"epoch": 23.79,
"learning_rate": 7.111111111111112e-06,
"loss": 5.1226,
"step": 84000
},
{
"epoch": 24.07,
"learning_rate": 6.666666666666667e-06,
"loss": 5.1185,
"step": 85000
},
{
"epoch": 24.36,
"learning_rate": 6.222222222222223e-06,
"loss": 5.1089,
"step": 86000
},
{
"epoch": 24.64,
"learning_rate": 5.777777777777778e-06,
"loss": 5.111,
"step": 87000
},
{
"epoch": 24.92,
"learning_rate": 5.333333333333334e-06,
"loss": 5.1099,
"step": 88000
},
{
"epoch": 25.2,
"learning_rate": 4.888888888888889e-06,
"loss": 5.1088,
"step": 89000
},
{
"epoch": 25.49,
"learning_rate": 4.444444444444444e-06,
"loss": 5.1057,
"step": 90000
},
{
"epoch": 25.77,
"learning_rate": 4.000000000000001e-06,
"loss": 5.1054,
"step": 91000
},
{
"epoch": 26.05,
"learning_rate": 3.555555555555556e-06,
"loss": 5.0996,
"step": 92000
},
{
"epoch": 26.34,
"learning_rate": 3.1111111111111116e-06,
"loss": 5.1013,
"step": 93000
},
{
"epoch": 26.62,
"learning_rate": 2.666666666666667e-06,
"loss": 5.1,
"step": 94000
},
{
"epoch": 26.9,
"learning_rate": 2.222222222222222e-06,
"loss": 5.0977,
"step": 95000
},
{
"epoch": 27.19,
"learning_rate": 1.777777777777778e-06,
"loss": 5.0966,
"step": 96000
},
{
"epoch": 27.47,
"learning_rate": 1.3333333333333334e-06,
"loss": 5.098,
"step": 97000
},
{
"epoch": 27.75,
"learning_rate": 8.88888888888889e-07,
"loss": 5.0919,
"step": 98000
},
{
"epoch": 28.04,
"learning_rate": 4.444444444444445e-07,
"loss": 5.0967,
"step": 99000
},
{
"epoch": 28.32,
"learning_rate": 0.0,
"loss": 5.0951,
"step": 100000
}
],
"logging_steps": 1000,
"max_steps": 100000,
"num_train_epochs": 29,
"save_steps": 1000,
"total_flos": 7.577756588702695e+18,
"trial_name": null,
"trial_params": null
}