{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 39.388888888888886,
  "eval_steps": 500,
  "global_step": 280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.17,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.727,
      "step": 3
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.9644,
      "step": 6
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.5e-05,
      "loss": 1.8722,
      "step": 9
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.6195,
      "step": 12
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.8716,
      "step": 15
    },
    {
      "epoch": 2.22,
      "learning_rate": 5e-05,
      "loss": 1.5027,
      "step": 18
    },
    {
      "epoch": 2.39,
      "learning_rate": 5.833333333333334e-05,
      "loss": 1.6891,
      "step": 21
    },
    {
      "epoch": 3.17,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.5746,
      "step": 24
    },
    {
      "epoch": 3.33,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.3677,
      "step": 27
    },
    {
      "epoch": 4.11,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.5316,
      "step": 30
    },
    {
      "epoch": 4.28,
      "learning_rate": 9.166666666666667e-05,
      "loss": 1.361,
      "step": 33
    },
    {
      "epoch": 5.06,
      "learning_rate": 0.0001,
      "loss": 1.042,
      "step": 36
    },
    {
      "epoch": 5.22,
      "learning_rate": 0.00010833333333333333,
      "loss": 1.2362,
      "step": 39
    },
    {
      "epoch": 5.39,
      "learning_rate": 0.00011666666666666668,
      "loss": 1.1618,
      "step": 42
    },
    {
      "epoch": 6.17,
      "learning_rate": 0.000125,
      "loss": 1.0433,
      "step": 45
    },
    {
      "epoch": 6.33,
      "learning_rate": 0.00013333333333333334,
      "loss": 1.0156,
      "step": 48
    },
    {
      "epoch": 7.11,
      "learning_rate": 0.00014166666666666668,
      "loss": 0.794,
      "step": 51
    },
    {
      "epoch": 7.28,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.8204,
      "step": 54
    },
    {
      "epoch": 8.06,
      "learning_rate": 0.00015833333333333332,
      "loss": 0.6787,
      "step": 57
    },
    {
      "epoch": 8.22,
      "learning_rate": 0.0001666666666666667,
      "loss": 0.5017,
      "step": 60
    },
    {
      "epoch": 8.39,
      "learning_rate": 0.000175,
      "loss": 0.4836,
      "step": 63
    },
    {
      "epoch": 9.17,
      "learning_rate": 0.00018333333333333334,
      "loss": 0.3545,
      "step": 66
    },
    {
      "epoch": 9.33,
      "learning_rate": 0.00019166666666666667,
      "loss": 0.5009,
      "step": 69
    },
    {
      "epoch": 10.11,
      "learning_rate": 0.0002,
      "loss": 0.3405,
      "step": 72
    },
    {
      "epoch": 10.28,
      "learning_rate": 0.0001990740740740741,
      "loss": 0.3549,
      "step": 75
    },
    {
      "epoch": 11.06,
      "learning_rate": 0.00019814814814814814,
      "loss": 0.2224,
      "step": 78
    },
    {
      "epoch": 11.22,
      "learning_rate": 0.00019722222222222225,
      "loss": 0.2505,
      "step": 81
    },
    {
      "epoch": 11.39,
      "learning_rate": 0.0001962962962962963,
      "loss": 0.2139,
      "step": 84
    },
    {
      "epoch": 12.17,
      "learning_rate": 0.00019537037037037038,
      "loss": 0.1912,
      "step": 87
    },
    {
      "epoch": 12.33,
      "learning_rate": 0.00019444444444444446,
      "loss": 0.1754,
      "step": 90
    },
    {
      "epoch": 13.11,
      "learning_rate": 0.0001935185185185185,
      "loss": 0.1442,
      "step": 93
    },
    {
      "epoch": 13.28,
      "learning_rate": 0.0001925925925925926,
      "loss": 0.1333,
      "step": 96
    },
    {
      "epoch": 14.06,
      "learning_rate": 0.00019166666666666667,
      "loss": 0.1117,
      "step": 99
    },
    {
      "epoch": 14.22,
      "learning_rate": 0.00019074074074074075,
      "loss": 0.0805,
      "step": 102
    },
    {
      "epoch": 14.39,
      "learning_rate": 0.00018981481481481483,
      "loss": 0.0944,
      "step": 105
    },
    {
      "epoch": 15.17,
      "learning_rate": 0.00018888888888888888,
      "loss": 0.0615,
      "step": 108
    },
    {
      "epoch": 15.33,
      "learning_rate": 0.00018796296296296296,
      "loss": 0.0458,
      "step": 111
    },
    {
      "epoch": 16.11,
      "learning_rate": 0.00018703703703703704,
      "loss": 0.0527,
      "step": 114
    },
    {
      "epoch": 16.28,
      "learning_rate": 0.00018611111111111112,
      "loss": 0.0355,
      "step": 117
    },
    {
      "epoch": 17.06,
      "learning_rate": 0.0001851851851851852,
      "loss": 0.0407,
      "step": 120
    },
    {
      "epoch": 17.22,
      "learning_rate": 0.00018425925925925926,
      "loss": 0.0283,
      "step": 123
    },
    {
      "epoch": 17.39,
      "learning_rate": 0.00018333333333333334,
      "loss": 0.0329,
      "step": 126
    },
    {
      "epoch": 18.17,
      "learning_rate": 0.00018240740740740742,
      "loss": 0.0239,
      "step": 129
    },
    {
      "epoch": 18.33,
      "learning_rate": 0.0001814814814814815,
      "loss": 0.0241,
      "step": 132
    },
    {
      "epoch": 19.11,
      "learning_rate": 0.00018055555555555557,
      "loss": 0.0138,
      "step": 135
    },
    {
      "epoch": 19.28,
      "learning_rate": 0.00017962962962962963,
      "loss": 0.0215,
      "step": 138
    },
    {
      "epoch": 20.06,
      "learning_rate": 0.0001787037037037037,
      "loss": 0.0198,
      "step": 141
    },
    {
      "epoch": 20.22,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.0164,
      "step": 144
    },
    {
      "epoch": 20.39,
      "learning_rate": 0.00017685185185185187,
      "loss": 0.0218,
      "step": 147
    },
    {
      "epoch": 21.17,
      "learning_rate": 0.00017592592592592595,
      "loss": 0.0139,
      "step": 150
    },
    {
      "epoch": 21.33,
      "learning_rate": 0.000175,
      "loss": 0.0191,
      "step": 153
    },
    {
      "epoch": 22.11,
      "learning_rate": 0.00017407407407407408,
      "loss": 0.0111,
      "step": 156
    },
    {
      "epoch": 22.28,
      "learning_rate": 0.00017314814814814816,
      "loss": 0.0134,
      "step": 159
    },
    {
      "epoch": 23.06,
      "learning_rate": 0.00017222222222222224,
      "loss": 0.0163,
      "step": 162
    },
    {
      "epoch": 23.22,
      "learning_rate": 0.00017129629629629632,
      "loss": 0.0097,
      "step": 165
    },
    {
      "epoch": 23.39,
      "learning_rate": 0.00017037037037037037,
      "loss": 0.0141,
      "step": 168
    },
    {
      "epoch": 24.17,
      "learning_rate": 0.00016944444444444445,
      "loss": 0.0122,
      "step": 171
    },
    {
      "epoch": 24.33,
      "learning_rate": 0.00016851851851851853,
      "loss": 0.0111,
      "step": 174
    },
    {
      "epoch": 25.11,
      "learning_rate": 0.00016759259259259258,
      "loss": 0.0105,
      "step": 177
    },
    {
      "epoch": 25.28,
      "learning_rate": 0.0001666666666666667,
      "loss": 0.0087,
      "step": 180
    },
    {
      "epoch": 26.06,
      "learning_rate": 0.00016574074074074074,
      "loss": 0.0097,
      "step": 183
    },
    {
      "epoch": 26.22,
      "learning_rate": 0.00016481481481481482,
      "loss": 0.0099,
      "step": 186
    },
    {
      "epoch": 26.39,
      "learning_rate": 0.0001638888888888889,
      "loss": 0.009,
      "step": 189
    },
    {
      "epoch": 27.17,
      "learning_rate": 0.00016296296296296295,
      "loss": 0.0097,
      "step": 192
    },
    {
      "epoch": 27.33,
      "learning_rate": 0.00016203703703703706,
      "loss": 0.008,
      "step": 195
    },
    {
      "epoch": 28.11,
      "learning_rate": 0.0001611111111111111,
      "loss": 0.008,
      "step": 198
    },
    {
      "epoch": 28.28,
      "learning_rate": 0.0001601851851851852,
      "loss": 0.0055,
      "step": 201
    },
    {
      "epoch": 29.06,
      "learning_rate": 0.00015925925925925927,
      "loss": 0.0094,
      "step": 204
    },
    {
      "epoch": 29.22,
      "learning_rate": 0.00015833333333333332,
      "loss": 0.0052,
      "step": 207
    },
    {
      "epoch": 29.39,
      "learning_rate": 0.00015740740740740743,
      "loss": 0.0072,
      "step": 210
    },
    {
      "epoch": 30.17,
      "learning_rate": 0.00015648148148148148,
      "loss": 0.0066,
      "step": 213
    },
    {
      "epoch": 30.33,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.006,
      "step": 216
    },
    {
      "epoch": 31.11,
      "learning_rate": 0.00015462962962962964,
      "loss": 0.0078,
      "step": 219
    },
    {
      "epoch": 31.28,
      "learning_rate": 0.0001537037037037037,
      "loss": 0.0051,
      "step": 222
    },
    {
      "epoch": 32.06,
      "learning_rate": 0.00015277777777777777,
      "loss": 0.0042,
      "step": 225
    },
    {
      "epoch": 32.22,
      "learning_rate": 0.00015185185185185185,
      "loss": 0.0058,
      "step": 228
    },
    {
      "epoch": 32.39,
      "learning_rate": 0.00015092592592592593,
      "loss": 0.0059,
      "step": 231
    },
    {
      "epoch": 33.17,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.0065,
      "step": 234
    },
    {
      "epoch": 33.33,
      "learning_rate": 0.00014907407407407407,
      "loss": 0.0047,
      "step": 237
    },
    {
      "epoch": 34.11,
      "learning_rate": 0.00014814814814814815,
      "loss": 0.0061,
      "step": 240
    },
    {
      "epoch": 34.28,
      "learning_rate": 0.00014722222222222223,
      "loss": 0.0031,
      "step": 243
    },
    {
      "epoch": 35.06,
      "learning_rate": 0.0001462962962962963,
      "loss": 0.0066,
      "step": 246
    },
    {
      "epoch": 35.22,
      "learning_rate": 0.00014537037037037039,
      "loss": 0.003,
      "step": 249
    },
    {
      "epoch": 35.39,
      "learning_rate": 0.00014444444444444444,
      "loss": 0.0054,
      "step": 252
    },
    {
      "epoch": 36.17,
      "learning_rate": 0.00014351851851851852,
      "loss": 0.0029,
      "step": 255
    },
    {
      "epoch": 36.33,
      "learning_rate": 0.0001425925925925926,
      "loss": 0.0063,
      "step": 258
    },
    {
      "epoch": 37.11,
      "learning_rate": 0.00014166666666666668,
      "loss": 0.0039,
      "step": 261
    },
    {
      "epoch": 37.28,
      "learning_rate": 0.00014074074074074076,
      "loss": 0.0043,
      "step": 264
    },
    {
      "epoch": 38.06,
      "learning_rate": 0.0001398148148148148,
      "loss": 0.0063,
      "step": 267
    },
    {
      "epoch": 38.22,
      "learning_rate": 0.0001388888888888889,
      "loss": 0.003,
      "step": 270
    },
    {
      "epoch": 38.39,
      "learning_rate": 0.00013796296296296297,
      "loss": 0.005,
      "step": 273
    },
    {
      "epoch": 39.17,
      "learning_rate": 0.00013703703703703705,
      "loss": 0.005,
      "step": 276
    },
    {
      "epoch": 39.33,
      "learning_rate": 0.00013611111111111113,
      "loss": 0.0041,
      "step": 279
    }
  ],
  "logging_steps": 3,
  "max_steps": 720,
  "num_train_epochs": 40,
  "save_steps": 500,
  "total_flos": 2.27623670120448e+16,
  "trial_name": null,
  "trial_params": null
}