{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.0,
  "eval_steps": 500,
  "global_step": 1350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.56,
      "learning_rate": 1.962962962962963e-05,
      "loss": 0.6074,
      "step": 25
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.925925925925926e-05,
      "loss": 0.4951,
      "step": 50
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.888888888888889e-05,
      "loss": 0.4154,
      "step": 75
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.851851851851852e-05,
      "loss": 0.3709,
      "step": 100
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.814814814814815e-05,
      "loss": 0.3047,
      "step": 125
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 0.2429,
      "step": 150
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.740740740740741e-05,
      "loss": 0.2105,
      "step": 175
    },
    {
      "epoch": 4.44,
      "learning_rate": 1.7037037037037038e-05,
      "loss": 0.1756,
      "step": 200
    },
    {
      "epoch": 5.0,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.1627,
      "step": 225
    },
    {
      "epoch": 5.56,
      "learning_rate": 1.6296296296296297e-05,
      "loss": 0.1145,
      "step": 250
    },
    {
      "epoch": 6.11,
      "learning_rate": 1.5925925925925926e-05,
      "loss": 0.1259,
      "step": 275
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.555555555555556e-05,
      "loss": 0.1048,
      "step": 300
    },
    {
      "epoch": 7.22,
      "learning_rate": 1.5185185185185187e-05,
      "loss": 0.0817,
      "step": 325
    },
    {
      "epoch": 7.78,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.0803,
      "step": 350
    },
    {
      "epoch": 8.33,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 0.0794,
      "step": 375
    },
    {
      "epoch": 8.89,
      "learning_rate": 1.4074074074074075e-05,
      "loss": 0.0689,
      "step": 400
    },
    {
      "epoch": 9.44,
      "learning_rate": 1.3703703703703706e-05,
      "loss": 0.0506,
      "step": 425
    },
    {
      "epoch": 10.0,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.0618,
      "step": 450
    },
    {
      "epoch": 10.56,
      "learning_rate": 1.2962962962962964e-05,
      "loss": 0.0442,
      "step": 475
    },
    {
      "epoch": 11.11,
      "learning_rate": 1.2592592592592593e-05,
      "loss": 0.0497,
      "step": 500
    },
    {
      "epoch": 11.67,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 0.0401,
      "step": 525
    },
    {
      "epoch": 12.22,
      "learning_rate": 1.1851851851851852e-05,
      "loss": 0.0399,
      "step": 550
    },
    {
      "epoch": 12.78,
      "learning_rate": 1.1481481481481482e-05,
      "loss": 0.0353,
      "step": 575
    },
    {
      "epoch": 13.33,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 0.0339,
      "step": 600
    },
    {
      "epoch": 13.89,
      "learning_rate": 1.0740740740740742e-05,
      "loss": 0.0354,
      "step": 625
    },
    {
      "epoch": 14.44,
      "learning_rate": 1.037037037037037e-05,
      "loss": 0.0249,
      "step": 650
    },
    {
      "epoch": 15.0,
      "learning_rate": 1e-05,
      "loss": 0.0321,
      "step": 675
    },
    {
      "epoch": 15.56,
      "learning_rate": 9.62962962962963e-06,
      "loss": 0.0256,
      "step": 700
    },
    {
      "epoch": 16.11,
      "learning_rate": 9.25925925925926e-06,
      "loss": 0.028,
      "step": 725
    },
    {
      "epoch": 16.67,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.0237,
      "step": 750
    },
    {
      "epoch": 17.22,
      "learning_rate": 8.518518518518519e-06,
      "loss": 0.0193,
      "step": 775
    },
    {
      "epoch": 17.78,
      "learning_rate": 8.148148148148148e-06,
      "loss": 0.0218,
      "step": 800
    },
    {
      "epoch": 18.33,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.0195,
      "step": 825
    },
    {
      "epoch": 18.89,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.0172,
      "step": 850
    },
    {
      "epoch": 19.44,
      "learning_rate": 7.0370370370370375e-06,
      "loss": 0.0159,
      "step": 875
    },
    {
      "epoch": 20.0,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.0199,
      "step": 900
    },
    {
      "epoch": 20.56,
      "learning_rate": 6.296296296296297e-06,
      "loss": 0.0115,
      "step": 925
    },
    {
      "epoch": 21.11,
      "learning_rate": 5.925925925925926e-06,
      "loss": 0.0185,
      "step": 950
    },
    {
      "epoch": 21.67,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.0104,
      "step": 975
    },
    {
      "epoch": 22.22,
      "learning_rate": 5.185185185185185e-06,
      "loss": 0.0108,
      "step": 1000
    },
    {
      "epoch": 22.78,
      "learning_rate": 4.814814814814815e-06,
      "loss": 0.012,
      "step": 1025
    },
    {
      "epoch": 23.33,
      "learning_rate": 4.45925925925926e-06,
      "loss": 0.0102,
      "step": 1050
    },
    {
      "epoch": 23.89,
      "learning_rate": 4.088888888888889e-06,
      "loss": 0.0131,
      "step": 1075
    },
    {
      "epoch": 24.44,
      "learning_rate": 3.718518518518519e-06,
      "loss": 0.0124,
      "step": 1100
    },
    {
      "epoch": 25.0,
      "learning_rate": 3.348148148148148e-06,
      "loss": 0.0132,
      "step": 1125
    },
    {
      "epoch": 25.56,
      "learning_rate": 2.9777777777777777e-06,
      "loss": 0.0113,
      "step": 1150
    },
    {
      "epoch": 26.11,
      "learning_rate": 2.6074074074074073e-06,
      "loss": 0.0111,
      "step": 1175
    },
    {
      "epoch": 26.67,
      "learning_rate": 2.2370370370370373e-06,
      "loss": 0.0087,
      "step": 1200
    },
    {
      "epoch": 27.22,
      "learning_rate": 1.8666666666666669e-06,
      "loss": 0.0176,
      "step": 1225
    },
    {
      "epoch": 27.78,
      "learning_rate": 1.4962962962962964e-06,
      "loss": 0.0081,
      "step": 1250
    },
    {
      "epoch": 28.33,
      "learning_rate": 1.125925925925926e-06,
      "loss": 0.01,
      "step": 1275
    },
    {
      "epoch": 28.89,
      "learning_rate": 7.555555555555556e-07,
      "loss": 0.0065,
      "step": 1300
    },
    {
      "epoch": 29.44,
      "learning_rate": 3.8518518518518525e-07,
      "loss": 0.009,
      "step": 1325
    },
    {
      "epoch": 30.0,
      "learning_rate": 1.4814814814814816e-08,
      "loss": 0.008,
      "step": 1350
    },
    {
      "epoch": 30.0,
      "step": 1350,
      "total_flos": 1.68739697578752e+16,
      "train_loss": 0.0829941279579092,
      "train_runtime": 525.2202,
      "train_samples_per_second": 488.424,
      "train_steps_per_second": 2.57
    }
  ],
  "logging_steps": 25,
  "max_steps": 1350,
  "num_train_epochs": 30,
  "save_steps": 500,
  "total_flos": 1.68739697578752e+16,
  "trial_name": null,
  "trial_params": null
}