{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 83.33333333333333,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.39,
      "learning_rate": 3e-06,
      "loss": 7.0999,
      "step": 50
    },
    {
      "epoch": 2.78,
      "learning_rate": 6e-06,
      "loss": 3.9223,
      "step": 100
    },
    {
      "epoch": 4.17,
      "learning_rate": 9e-06,
      "loss": 2.5274,
      "step": 150
    },
    {
      "epoch": 5.56,
      "learning_rate": 1.2e-05,
      "loss": 2.1012,
      "step": 200
    },
    {
      "epoch": 6.94,
      "learning_rate": 1.5e-05,
      "loss": 1.9323,
      "step": 250
    },
    {
      "epoch": 8.33,
      "learning_rate": 1.8e-05,
      "loss": 1.7811,
      "step": 300
    },
    {
      "epoch": 9.72,
      "learning_rate": 2.1e-05,
      "loss": 1.7308,
      "step": 350
    },
    {
      "epoch": 11.11,
      "learning_rate": 2.4e-05,
      "loss": 1.6515,
      "step": 400
    },
    {
      "epoch": 12.5,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.6299,
      "step": 450
    },
    {
      "epoch": 13.89,
      "learning_rate": 3e-05,
      "loss": 1.6019,
      "step": 500
    },
    {
      "epoch": 15.28,
      "learning_rate": 2.9516129032258067e-05,
      "loss": 1.5727,
      "step": 550
    },
    {
      "epoch": 16.67,
      "learning_rate": 2.903225806451613e-05,
      "loss": 1.5442,
      "step": 600
    },
    {
      "epoch": 18.06,
      "learning_rate": 2.8548387096774196e-05,
      "loss": 1.5382,
      "step": 650
    },
    {
      "epoch": 19.44,
      "learning_rate": 2.806451612903226e-05,
      "loss": 1.5224,
      "step": 700
    },
    {
      "epoch": 20.83,
      "learning_rate": 2.758064516129032e-05,
      "loss": 1.5323,
      "step": 750
    },
    {
      "epoch": 22.22,
      "learning_rate": 2.7096774193548387e-05,
      "loss": 1.5036,
      "step": 800
    },
    {
      "epoch": 23.61,
      "learning_rate": 2.6612903225806453e-05,
      "loss": 1.4981,
      "step": 850
    },
    {
      "epoch": 25.0,
      "learning_rate": 2.6129032258064516e-05,
      "loss": 1.5055,
      "step": 900
    },
    {
      "epoch": 26.39,
      "learning_rate": 2.5645161290322582e-05,
      "loss": 1.4892,
      "step": 950
    },
    {
      "epoch": 27.78,
      "learning_rate": 2.5161290322580648e-05,
      "loss": 1.4892,
      "step": 1000
    },
    {
      "epoch": 29.17,
      "learning_rate": 2.467741935483871e-05,
      "loss": 1.4802,
      "step": 1050
    },
    {
      "epoch": 30.56,
      "learning_rate": 2.4193548387096773e-05,
      "loss": 1.4804,
      "step": 1100
    },
    {
      "epoch": 31.94,
      "learning_rate": 2.370967741935484e-05,
      "loss": 1.4692,
      "step": 1150
    },
    {
      "epoch": 33.33,
      "learning_rate": 2.3225806451612902e-05,
      "loss": 1.478,
      "step": 1200
    },
    {
      "epoch": 34.72,
      "learning_rate": 2.274193548387097e-05,
      "loss": 1.4692,
      "step": 1250
    },
    {
      "epoch": 36.11,
      "learning_rate": 2.2258064516129034e-05,
      "loss": 1.465,
      "step": 1300
    },
    {
      "epoch": 37.5,
      "learning_rate": 2.1774193548387097e-05,
      "loss": 1.4653,
      "step": 1350
    },
    {
      "epoch": 38.89,
      "learning_rate": 2.1290322580645163e-05,
      "loss": 1.4636,
      "step": 1400
    },
    {
      "epoch": 40.28,
      "learning_rate": 2.080645161290323e-05,
      "loss": 1.467,
      "step": 1450
    },
    {
      "epoch": 41.67,
      "learning_rate": 2.032258064516129e-05,
      "loss": 1.4605,
      "step": 1500
    },
    {
      "epoch": 43.06,
      "learning_rate": 1.9838709677419355e-05,
      "loss": 1.4617,
      "step": 1550
    },
    {
      "epoch": 44.44,
      "learning_rate": 1.935483870967742e-05,
      "loss": 1.4632,
      "step": 1600
    },
    {
      "epoch": 45.83,
      "learning_rate": 1.8870967741935484e-05,
      "loss": 1.4641,
      "step": 1650
    },
    {
      "epoch": 47.22,
      "learning_rate": 1.838709677419355e-05,
      "loss": 1.4528,
      "step": 1700
    },
    {
      "epoch": 48.61,
      "learning_rate": 1.7903225806451616e-05,
      "loss": 1.4566,
      "step": 1750
    },
    {
      "epoch": 50.0,
      "learning_rate": 1.741935483870968e-05,
      "loss": 1.4522,
      "step": 1800
    },
    {
      "epoch": 51.39,
      "learning_rate": 1.6935483870967744e-05,
      "loss": 1.4598,
      "step": 1850
    },
    {
      "epoch": 52.78,
      "learning_rate": 1.6451612903225807e-05,
      "loss": 1.454,
      "step": 1900
    },
    {
      "epoch": 54.17,
      "learning_rate": 1.596774193548387e-05,
      "loss": 1.4568,
      "step": 1950
    },
    {
      "epoch": 55.56,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 1.4528,
      "step": 2000
    },
    {
      "epoch": 56.94,
      "learning_rate": 1.5e-05,
      "loss": 1.4471,
      "step": 2050
    },
    {
      "epoch": 58.33,
      "learning_rate": 1.4516129032258065e-05,
      "loss": 1.4471,
      "step": 2100
    },
    {
      "epoch": 59.72,
      "learning_rate": 1.403225806451613e-05,
      "loss": 1.4554,
      "step": 2150
    },
    {
      "epoch": 61.11,
      "learning_rate": 1.3548387096774194e-05,
      "loss": 1.4435,
      "step": 2200
    },
    {
      "epoch": 62.5,
      "learning_rate": 1.3064516129032258e-05,
      "loss": 1.4456,
      "step": 2250
    },
    {
      "epoch": 63.89,
      "learning_rate": 1.2580645161290324e-05,
      "loss": 1.4452,
      "step": 2300
    },
    {
      "epoch": 65.28,
      "learning_rate": 1.2096774193548387e-05,
      "loss": 1.4496,
      "step": 2350
    },
    {
      "epoch": 66.67,
      "learning_rate": 1.1612903225806451e-05,
      "loss": 1.4431,
      "step": 2400
    },
    {
      "epoch": 68.06,
      "learning_rate": 1.1129032258064517e-05,
      "loss": 1.4433,
      "step": 2450
    },
    {
      "epoch": 69.44,
      "learning_rate": 1.0645161290322582e-05,
      "loss": 1.4437,
      "step": 2500
    },
    {
      "epoch": 70.83,
      "learning_rate": 1.0161290322580644e-05,
      "loss": 1.4414,
      "step": 2550
    },
    {
      "epoch": 72.22,
      "learning_rate": 9.67741935483871e-06,
      "loss": 1.4413,
      "step": 2600
    },
    {
      "epoch": 73.61,
      "learning_rate": 9.193548387096775e-06,
      "loss": 1.4414,
      "step": 2650
    },
    {
      "epoch": 75.0,
      "learning_rate": 8.70967741935484e-06,
      "loss": 1.4396,
      "step": 2700
    },
    {
      "epoch": 76.39,
      "learning_rate": 8.225806451612904e-06,
      "loss": 1.4415,
      "step": 2750
    },
    {
      "epoch": 77.78,
      "learning_rate": 7.741935483870968e-06,
      "loss": 1.443,
      "step": 2800
    },
    {
      "epoch": 79.17,
      "learning_rate": 7.258064516129032e-06,
      "loss": 1.4432,
      "step": 2850
    },
    {
      "epoch": 80.56,
      "learning_rate": 6.774193548387097e-06,
      "loss": 1.4389,
      "step": 2900
    },
    {
      "epoch": 81.94,
      "learning_rate": 6.290322580645162e-06,
      "loss": 1.4383,
      "step": 2950
    },
    {
      "epoch": 83.33,
      "learning_rate": 5.8064516129032256e-06,
      "loss": 1.4387,
      "step": 3000
    }
  ],
  "max_steps": 3600,
  "num_train_epochs": 100,
  "total_flos": 7215621246812160.0,
  "trial_name": null,
  "trial_params": null
}