{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.488335925349922,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 0.00099375,
      "loss": 2.9857,
      "step": 1
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0009875,
      "loss": 2.8497,
      "step": 2
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00098125,
      "loss": 2.8853,
      "step": 3
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.000975,
      "loss": 2.7689,
      "step": 4
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00096875,
      "loss": 2.7474,
      "step": 5
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0009625,
      "loss": 2.7695,
      "step": 6
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0009562500000000001,
      "loss": 2.6897,
      "step": 7
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00095,
      "loss": 2.6285,
      "step": 8
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00094375,
      "loss": 2.7107,
      "step": 9
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0009375,
      "loss": 2.6897,
      "step": 10
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00093125,
      "loss": 2.6634,
      "step": 11
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.000925,
      "loss": 2.635,
      "step": 12
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00091875,
      "loss": 2.643,
      "step": 13
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0009125,
      "loss": 2.7166,
      "step": 14
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00090625,
      "loss": 2.7354,
      "step": 15
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0009000000000000001,
      "loss": 2.615,
      "step": 16
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00089375,
      "loss": 2.7225,
      "step": 17
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0008874999999999999,
      "loss": 2.6189,
      "step": 18
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00088125,
      "loss": 2.7269,
      "step": 19
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.000875,
      "loss": 2.6312,
      "step": 20
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0008687500000000001,
      "loss": 2.6758,
      "step": 21
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0008625000000000001,
      "loss": 2.6997,
      "step": 22
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00085625,
      "loss": 2.6878,
      "step": 23
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00085,
      "loss": 2.6433,
      "step": 24
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00084375,
      "loss": 2.7053,
      "step": 25
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0008375,
      "loss": 2.6745,
      "step": 26
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0008312500000000001,
      "loss": 2.7376,
      "step": 27
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.000825,
      "loss": 2.6577,
      "step": 28
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00081875,
      "loss": 2.7428,
      "step": 29
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0008125000000000001,
      "loss": 2.6231,
      "step": 30
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00080625,
      "loss": 2.7078,
      "step": 31
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0008,
      "loss": 2.7057,
      "step": 32
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00079375,
      "loss": 2.6594,
      "step": 33
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0007875,
      "loss": 2.6718,
      "step": 34
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00078125,
      "loss": 2.6459,
      "step": 35
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0007750000000000001,
      "loss": 2.6558,
      "step": 36
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00076875,
      "loss": 2.6559,
      "step": 37
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0007624999999999999,
      "loss": 2.6488,
      "step": 38
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00075625,
      "loss": 2.5902,
      "step": 39
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00075,
      "loss": 2.6244,
      "step": 40
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00074375,
      "loss": 2.5278,
      "step": 41
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0007375000000000001,
      "loss": 2.5213,
      "step": 42
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00073125,
      "loss": 2.5515,
      "step": 43
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.000725,
      "loss": 2.5196,
      "step": 44
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00071875,
      "loss": 2.5725,
      "step": 45
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.0007125,
      "loss": 2.4971,
      "step": 46
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0007062500000000001,
      "loss": 2.4976,
      "step": 47
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.0007,
      "loss": 2.5467,
      "step": 48
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00069375,
      "loss": 2.5302,
      "step": 49
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0006875,
      "loss": 2.5224,
      "step": 50
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00068125,
      "loss": 2.5451,
      "step": 51
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.000675,
      "loss": 2.4609,
      "step": 52
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.00066875,
      "loss": 2.4897,
      "step": 53
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0006625,
      "loss": 2.5583,
      "step": 54
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.00065625,
      "loss": 2.5675,
      "step": 55
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0006500000000000001,
      "loss": 2.5598,
      "step": 56
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.00064375,
      "loss": 2.48,
      "step": 57
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.0006374999999999999,
      "loss": 2.4761,
      "step": 58
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.00063125,
      "loss": 2.5599,
      "step": 59
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.000625,
      "loss": 2.5199,
      "step": 60
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.00061875,
      "loss": 2.5161,
      "step": 61
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.0006125000000000001,
      "loss": 2.5797,
      "step": 62
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.00060625,
      "loss": 2.5345,
      "step": 63
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.0006,
      "loss": 2.4726,
      "step": 64
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.00059375,
      "loss": 2.448,
      "step": 65
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0005875,
      "loss": 2.4939,
      "step": 66
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.0005812500000000001,
      "loss": 2.4881,
      "step": 67
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.000575,
      "loss": 2.5731,
      "step": 68
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.00056875,
      "loss": 2.5114,
      "step": 69
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.0005625000000000001,
      "loss": 2.5049,
      "step": 70
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.00055625,
      "loss": 2.4856,
      "step": 71
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00055,
      "loss": 2.5077,
      "step": 72
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.00054375,
      "loss": 2.4801,
      "step": 73
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.0005375,
      "loss": 2.5599,
      "step": 74
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.00053125,
      "loss": 2.5269,
      "step": 75
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0005250000000000001,
      "loss": 2.4963,
      "step": 76
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.00051875,
      "loss": 2.4595,
      "step": 77
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.0005124999999999999,
      "loss": 2.4801,
      "step": 78
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00050625,
      "loss": 2.5154,
      "step": 79
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0005,
      "loss": 2.5179,
      "step": 80
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.00049375,
      "loss": 2.4121,
      "step": 81
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.0004875,
      "loss": 2.354,
      "step": 82
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.00048125,
      "loss": 2.3033,
      "step": 83
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.000475,
      "loss": 2.4023,
      "step": 84
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.00046875,
      "loss": 2.3755,
      "step": 85
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.0004625,
      "loss": 2.3892,
      "step": 86
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.00045625,
      "loss": 2.3534,
      "step": 87
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.00045000000000000004,
      "loss": 2.3634,
      "step": 88
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.00044374999999999997,
      "loss": 2.3336,
      "step": 89
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0004375,
      "loss": 2.3646,
      "step": 90
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.00043125000000000005,
      "loss": 2.3333,
      "step": 91
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.000425,
      "loss": 2.3344,
      "step": 92
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.00041875,
      "loss": 2.3308,
      "step": 93
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.0004125,
      "loss": 2.3677,
      "step": 94
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.00040625000000000004,
      "loss": 2.3755,
      "step": 95
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.0004,
      "loss": 2.3721,
      "step": 96
    },
    {
      "epoch": 2.41,
      "learning_rate": 0.00039375,
      "loss": 2.3463,
      "step": 97
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.00038750000000000004,
      "loss": 2.3536,
      "step": 98
    },
    {
      "epoch": 2.46,
      "learning_rate": 0.00038124999999999997,
      "loss": 2.3895,
      "step": 99
    },
    {
      "epoch": 2.49,
      "learning_rate": 0.000375,
      "loss": 2.3767,
      "step": 100
    }
  ],
  "max_steps": 160,
  "num_train_epochs": 4,
  "total_flos": 5.95613039443968e+16,
  "trial_name": null,
  "trial_params": null
}