{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 59.388888888888886,
  "eval_steps": 500,
  "global_step": 420,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.17,
      "learning_rate": 5.555555555555556e-06,
      "loss": 1.7272,
      "step": 3
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 1.9664,
      "step": 6
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.8802,
      "step": 9
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 1.641,
      "step": 12
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.9065,
      "step": 15
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.5674,
      "step": 18
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.7742,
      "step": 21
    },
    {
      "epoch": 3.17,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.6901,
      "step": 24
    },
    {
      "epoch": 3.33,
      "learning_rate": 5e-05,
      "loss": 1.4911,
      "step": 27
    },
    {
      "epoch": 4.11,
      "learning_rate": 5.555555555555556e-05,
      "loss": 1.6228,
      "step": 30
    },
    {
      "epoch": 4.28,
      "learning_rate": 6.111111111111112e-05,
      "loss": 1.4569,
      "step": 33
    },
    {
      "epoch": 5.06,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.1288,
      "step": 36
    },
    {
      "epoch": 5.22,
      "learning_rate": 7.222222222222222e-05,
      "loss": 1.3788,
      "step": 39
    },
    {
      "epoch": 5.39,
      "learning_rate": 7.777777777777778e-05,
      "loss": 1.3019,
      "step": 42
    },
    {
      "epoch": 6.17,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.1974,
      "step": 45
    },
    {
      "epoch": 6.33,
      "learning_rate": 8.888888888888889e-05,
      "loss": 1.2544,
      "step": 48
    },
    {
      "epoch": 7.11,
      "learning_rate": 9.444444444444444e-05,
      "loss": 1.0393,
      "step": 51
    },
    {
      "epoch": 7.28,
      "learning_rate": 0.0001,
      "loss": 1.0955,
      "step": 54
    },
    {
      "epoch": 8.06,
      "learning_rate": 0.00010555555555555557,
      "loss": 0.9618,
      "step": 57
    },
    {
      "epoch": 8.22,
      "learning_rate": 0.00011111111111111112,
      "loss": 0.7707,
      "step": 60
    },
    {
      "epoch": 8.39,
      "learning_rate": 0.00011666666666666668,
      "loss": 0.7415,
      "step": 63
    },
    {
      "epoch": 9.17,
      "learning_rate": 0.00012222222222222224,
      "loss": 0.5368,
      "step": 66
    },
    {
      "epoch": 9.33,
      "learning_rate": 0.00012777777777777776,
      "loss": 0.741,
      "step": 69
    },
    {
      "epoch": 10.11,
      "learning_rate": 0.00013333333333333334,
      "loss": 0.5261,
      "step": 72
    },
    {
      "epoch": 10.28,
      "learning_rate": 0.0001388888888888889,
      "loss": 0.5543,
      "step": 75
    },
    {
      "epoch": 11.06,
      "learning_rate": 0.00014444444444444444,
      "loss": 0.3309,
      "step": 78
    },
    {
      "epoch": 11.22,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.3979,
      "step": 81
    },
    {
      "epoch": 11.39,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.3294,
      "step": 84
    },
    {
      "epoch": 12.17,
      "learning_rate": 0.0001611111111111111,
      "loss": 0.3013,
      "step": 87
    },
    {
      "epoch": 12.33,
      "learning_rate": 0.0001666666666666667,
      "loss": 0.3047,
      "step": 90
    },
    {
      "epoch": 13.11,
      "learning_rate": 0.00017222222222222224,
      "loss": 0.2221,
      "step": 93
    },
    {
      "epoch": 13.28,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.2132,
      "step": 96
    },
    {
      "epoch": 14.06,
      "learning_rate": 0.00018333333333333334,
      "loss": 0.175,
      "step": 99
    },
    {
      "epoch": 14.22,
      "learning_rate": 0.00018888888888888888,
      "loss": 0.1485,
      "step": 102
    },
    {
      "epoch": 14.39,
      "learning_rate": 0.00019444444444444446,
      "loss": 0.1709,
      "step": 105
    },
    {
      "epoch": 15.17,
      "learning_rate": 0.0002,
      "loss": 0.1427,
      "step": 108
    },
    {
      "epoch": 15.33,
      "learning_rate": 0.00019938271604938272,
      "loss": 0.1162,
      "step": 111
    },
    {
      "epoch": 16.11,
      "learning_rate": 0.00019876543209876543,
      "loss": 0.1075,
      "step": 114
    },
    {
      "epoch": 16.28,
      "learning_rate": 0.00019814814814814814,
      "loss": 0.0965,
      "step": 117
    },
    {
      "epoch": 17.06,
      "learning_rate": 0.00019753086419753085,
      "loss": 0.0955,
      "step": 120
    },
    {
      "epoch": 17.22,
      "learning_rate": 0.0001969135802469136,
      "loss": 0.0706,
      "step": 123
    },
    {
      "epoch": 17.39,
      "learning_rate": 0.0001962962962962963,
      "loss": 0.0645,
      "step": 126
    },
    {
      "epoch": 18.17,
      "learning_rate": 0.000195679012345679,
      "loss": 0.042,
      "step": 129
    },
    {
      "epoch": 18.33,
      "learning_rate": 0.00019506172839506175,
      "loss": 0.0479,
      "step": 132
    },
    {
      "epoch": 19.11,
      "learning_rate": 0.00019444444444444446,
      "loss": 0.0258,
      "step": 135
    },
    {
      "epoch": 19.28,
      "learning_rate": 0.00019382716049382717,
      "loss": 0.0426,
      "step": 138
    },
    {
      "epoch": 20.06,
      "learning_rate": 0.00019320987654320988,
      "loss": 0.0314,
      "step": 141
    },
    {
      "epoch": 20.22,
      "learning_rate": 0.0001925925925925926,
      "loss": 0.0235,
      "step": 144
    },
    {
      "epoch": 20.39,
      "learning_rate": 0.00019197530864197533,
      "loss": 0.0359,
      "step": 147
    },
    {
      "epoch": 21.17,
      "learning_rate": 0.00019135802469135804,
      "loss": 0.0218,
      "step": 150
    },
    {
      "epoch": 21.33,
      "learning_rate": 0.00019074074074074075,
      "loss": 0.0279,
      "step": 153
    },
    {
      "epoch": 22.11,
      "learning_rate": 0.00019012345679012346,
      "loss": 0.019,
      "step": 156
    },
    {
      "epoch": 22.28,
      "learning_rate": 0.00018950617283950617,
      "loss": 0.0212,
      "step": 159
    },
    {
      "epoch": 23.06,
      "learning_rate": 0.00018888888888888888,
      "loss": 0.0201,
      "step": 162
    },
    {
      "epoch": 23.22,
      "learning_rate": 0.0001882716049382716,
      "loss": 0.0133,
      "step": 165
    },
    {
      "epoch": 23.39,
      "learning_rate": 0.00018765432098765433,
      "loss": 0.0194,
      "step": 168
    },
    {
      "epoch": 24.17,
      "learning_rate": 0.00018703703703703704,
      "loss": 0.0143,
      "step": 171
    },
    {
      "epoch": 24.33,
      "learning_rate": 0.00018641975308641978,
      "loss": 0.0118,
      "step": 174
    },
    {
      "epoch": 25.11,
      "learning_rate": 0.0001858024691358025,
      "loss": 0.0135,
      "step": 177
    },
    {
      "epoch": 25.28,
      "learning_rate": 0.0001851851851851852,
      "loss": 0.0105,
      "step": 180
    },
    {
      "epoch": 26.06,
      "learning_rate": 0.00018456790123456791,
      "loss": 0.0125,
      "step": 183
    },
    {
      "epoch": 26.22,
      "learning_rate": 0.00018395061728395062,
      "loss": 0.012,
      "step": 186
    },
    {
      "epoch": 26.39,
      "learning_rate": 0.00018333333333333334,
      "loss": 0.012,
      "step": 189
    },
    {
      "epoch": 27.17,
      "learning_rate": 0.00018271604938271605,
      "loss": 0.0114,
      "step": 192
    },
    {
      "epoch": 27.33,
      "learning_rate": 0.00018209876543209878,
      "loss": 0.0105,
      "step": 195
    },
    {
      "epoch": 28.11,
      "learning_rate": 0.0001814814814814815,
      "loss": 0.0115,
      "step": 198
    },
    {
      "epoch": 28.28,
      "learning_rate": 0.0001808641975308642,
      "loss": 0.0077,
      "step": 201
    },
    {
      "epoch": 29.06,
      "learning_rate": 0.00018024691358024692,
      "loss": 0.0116,
      "step": 204
    },
    {
      "epoch": 29.22,
      "learning_rate": 0.00017962962962962963,
      "loss": 0.0092,
      "step": 207
    },
    {
      "epoch": 29.39,
      "learning_rate": 0.00017901234567901234,
      "loss": 0.0105,
      "step": 210
    },
    {
      "epoch": 30.17,
      "learning_rate": 0.00017839506172839508,
      "loss": 0.0113,
      "step": 213
    },
    {
      "epoch": 30.33,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.0094,
      "step": 216
    },
    {
      "epoch": 31.11,
      "learning_rate": 0.00017716049382716052,
      "loss": 0.0092,
      "step": 219
    },
    {
      "epoch": 31.28,
      "learning_rate": 0.00017654320987654323,
      "loss": 0.0079,
      "step": 222
    },
    {
      "epoch": 32.06,
      "learning_rate": 0.00017592592592592595,
      "loss": 0.0089,
      "step": 225
    },
    {
      "epoch": 32.22,
      "learning_rate": 0.00017530864197530866,
      "loss": 0.0085,
      "step": 228
    },
    {
      "epoch": 32.39,
      "learning_rate": 0.00017469135802469137,
      "loss": 0.0081,
      "step": 231
    },
    {
      "epoch": 33.17,
      "learning_rate": 0.00017407407407407408,
      "loss": 0.0076,
      "step": 234
    },
    {
      "epoch": 33.33,
      "learning_rate": 0.0001734567901234568,
      "loss": 0.0059,
      "step": 237
    },
    {
      "epoch": 34.11,
      "learning_rate": 0.0001728395061728395,
      "loss": 0.0078,
      "step": 240
    },
    {
      "epoch": 34.28,
      "learning_rate": 0.00017222222222222224,
      "loss": 0.0039,
      "step": 243
    },
    {
      "epoch": 35.06,
      "learning_rate": 0.00017160493827160495,
      "loss": 0.0078,
      "step": 246
    },
    {
      "epoch": 35.22,
      "learning_rate": 0.00017098765432098766,
      "loss": 0.0041,
      "step": 249
    },
    {
      "epoch": 35.39,
      "learning_rate": 0.00017037037037037037,
      "loss": 0.0061,
      "step": 252
    },
    {
      "epoch": 36.17,
      "learning_rate": 0.00016975308641975308,
      "loss": 0.0039,
      "step": 255
    },
    {
      "epoch": 36.33,
      "learning_rate": 0.00016913580246913582,
      "loss": 0.007,
      "step": 258
    },
    {
      "epoch": 37.11,
      "learning_rate": 0.00016851851851851853,
      "loss": 0.0049,
      "step": 261
    },
    {
      "epoch": 37.28,
      "learning_rate": 0.00016790123456790124,
      "loss": 0.005,
      "step": 264
    },
    {
      "epoch": 38.06,
      "learning_rate": 0.00016728395061728398,
      "loss": 0.0074,
      "step": 267
    },
    {
      "epoch": 38.22,
      "learning_rate": 0.0001666666666666667,
      "loss": 0.0035,
      "step": 270
    },
    {
      "epoch": 38.39,
      "learning_rate": 0.0001660493827160494,
      "loss": 0.0067,
      "step": 273
    },
    {
      "epoch": 39.17,
      "learning_rate": 0.0001654320987654321,
      "loss": 0.0061,
      "step": 276
    },
    {
      "epoch": 39.33,
      "learning_rate": 0.00016481481481481482,
      "loss": 0.0047,
      "step": 279
    },
    {
      "epoch": 40.11,
      "learning_rate": 0.00016419753086419753,
      "loss": 0.0035,
      "step": 282
    },
    {
      "epoch": 40.28,
      "learning_rate": 0.00016358024691358024,
      "loss": 0.0072,
      "step": 285
    },
    {
      "epoch": 41.06,
      "learning_rate": 0.00016296296296296295,
      "loss": 0.0044,
      "step": 288
    },
    {
      "epoch": 41.22,
      "learning_rate": 0.0001623456790123457,
      "loss": 0.0031,
      "step": 291
    },
    {
      "epoch": 41.39,
      "learning_rate": 0.0001617283950617284,
      "loss": 0.0047,
      "step": 294
    },
    {
      "epoch": 42.17,
      "learning_rate": 0.0001611111111111111,
      "loss": 0.004,
      "step": 297
    },
    {
      "epoch": 42.33,
      "learning_rate": 0.00016049382716049385,
      "loss": 0.0048,
      "step": 300
    },
    {
      "epoch": 43.11,
      "learning_rate": 0.00015987654320987656,
      "loss": 0.0028,
      "step": 303
    },
    {
      "epoch": 43.28,
      "learning_rate": 0.00015925925925925927,
      "loss": 0.0043,
      "step": 306
    },
    {
      "epoch": 44.06,
      "learning_rate": 0.00015864197530864198,
      "loss": 0.0037,
      "step": 309
    },
    {
      "epoch": 44.22,
      "learning_rate": 0.0001580246913580247,
      "loss": 0.0032,
      "step": 312
    },
    {
      "epoch": 44.39,
      "learning_rate": 0.00015740740740740743,
      "loss": 0.0045,
      "step": 315
    },
    {
      "epoch": 45.17,
      "learning_rate": 0.00015679012345679014,
      "loss": 0.0041,
      "step": 318
    },
    {
      "epoch": 45.33,
      "learning_rate": 0.00015617283950617285,
      "loss": 0.0032,
      "step": 321
    },
    {
      "epoch": 46.11,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.002,
      "step": 324
    },
    {
      "epoch": 46.28,
      "learning_rate": 0.00015493827160493827,
      "loss": 0.0038,
      "step": 327
    },
    {
      "epoch": 47.06,
      "learning_rate": 0.00015432098765432098,
      "loss": 0.0036,
      "step": 330
    },
    {
      "epoch": 47.22,
      "learning_rate": 0.0001537037037037037,
      "loss": 0.0035,
      "step": 333
    },
    {
      "epoch": 47.39,
      "learning_rate": 0.0001530864197530864,
      "loss": 0.0035,
      "step": 336
    },
    {
      "epoch": 48.17,
      "learning_rate": 0.00015246913580246914,
      "loss": 0.005,
      "step": 339
    },
    {
      "epoch": 48.33,
      "learning_rate": 0.00015185185185185185,
      "loss": 0.0019,
      "step": 342
    },
    {
      "epoch": 49.11,
      "learning_rate": 0.0001512345679012346,
      "loss": 0.0033,
      "step": 345
    },
    {
      "epoch": 49.28,
      "learning_rate": 0.0001506172839506173,
      "loss": 0.0016,
      "step": 348
    },
    {
      "epoch": 50.06,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.0034,
      "step": 351
    },
    {
      "epoch": 50.22,
      "learning_rate": 0.00014938271604938272,
      "loss": 0.0028,
      "step": 354
    },
    {
      "epoch": 50.39,
      "learning_rate": 0.00014876543209876544,
      "loss": 0.0039,
      "step": 357
    },
    {
      "epoch": 51.17,
      "learning_rate": 0.00014814814814814815,
      "loss": 0.0019,
      "step": 360
    },
    {
      "epoch": 51.33,
      "learning_rate": 0.00014753086419753086,
      "loss": 0.0042,
      "step": 363
    },
    {
      "epoch": 52.11,
      "learning_rate": 0.0001469135802469136,
      "loss": 0.0016,
      "step": 366
    },
    {
      "epoch": 52.28,
      "learning_rate": 0.0001462962962962963,
      "loss": 0.0022,
      "step": 369
    },
    {
      "epoch": 53.06,
      "learning_rate": 0.00014567901234567902,
      "loss": 0.0048,
      "step": 372
    },
    {
      "epoch": 53.22,
      "learning_rate": 0.00014506172839506173,
      "loss": 0.0014,
      "step": 375
    },
    {
      "epoch": 53.39,
      "learning_rate": 0.00014444444444444444,
      "loss": 0.0026,
      "step": 378
    },
    {
      "epoch": 54.17,
      "learning_rate": 0.00014382716049382718,
      "loss": 0.0036,
      "step": 381
    },
    {
      "epoch": 54.33,
      "learning_rate": 0.00014320987654320989,
      "loss": 0.0019,
      "step": 384
    },
    {
      "epoch": 55.11,
      "learning_rate": 0.0001425925925925926,
      "loss": 0.0033,
      "step": 387
    },
    {
      "epoch": 55.28,
      "learning_rate": 0.00014197530864197534,
      "loss": 0.0012,
      "step": 390
    },
    {
      "epoch": 56.06,
      "learning_rate": 0.00014135802469135805,
      "loss": 0.0019,
      "step": 393
    },
    {
      "epoch": 56.22,
      "learning_rate": 0.00014074074074074076,
      "loss": 0.002,
      "step": 396
    },
    {
      "epoch": 56.39,
      "learning_rate": 0.00014012345679012347,
      "loss": 0.0031,
      "step": 399
    },
    {
      "epoch": 57.17,
      "learning_rate": 0.00013950617283950618,
      "loss": 0.0042,
      "step": 402
    },
    {
      "epoch": 57.33,
      "learning_rate": 0.0001388888888888889,
      "loss": 0.0008,
      "step": 405
    },
    {
      "epoch": 58.11,
      "learning_rate": 0.0001382716049382716,
      "loss": 0.0012,
      "step": 408
    },
    {
      "epoch": 58.28,
      "learning_rate": 0.0001376543209876543,
      "loss": 0.0026,
      "step": 411
    },
    {
      "epoch": 59.06,
      "learning_rate": 0.00013703703703703705,
      "loss": 0.0025,
      "step": 414
    },
    {
      "epoch": 59.22,
      "learning_rate": 0.00013641975308641976,
      "loss": 0.0026,
      "step": 417
    },
    {
      "epoch": 59.39,
      "learning_rate": 0.00013580246913580247,
      "loss": 0.0014,
      "step": 420
    }
  ],
  "logging_steps": 3,
  "max_steps": 1080,
  "num_train_epochs": 60,
  "save_steps": 500,
  "total_flos": 3.41435505180672e+16,
  "trial_name": null,
  "trial_params": null
}