{
  "best_metric": 2.6964175701141357,
  "best_model_checkpoint": "output/elton-john/checkpoint-316",
  "epoch": 2.0,
  "global_step": 316,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 0.0001368737597109298,
      "loss": 3.2318,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001358981418386525,
      "loss": 3.49,
      "step": 10
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00013428242585420812,
      "loss": 3.4363,
      "step": 15
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00013204197944418041,
      "loss": 3.3779,
      "step": 20
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00012919811234281,
      "loss": 3.5486,
      "step": 25
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00012577787364704225,
      "loss": 3.156,
      "step": 30
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001218137945423232,
      "loss": 3.4707,
      "step": 35
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00011734357888617527,
      "loss": 3.2883,
      "step": 40
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00011240974459253048,
      "loss": 3.2899,
      "step": 45
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00010705921922774988,
      "loss": 3.0576,
      "step": 50
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00010134289366476881,
      "loss": 2.9129,
      "step": 55
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.531513804073266e-05,
      "loss": 3.1296,
      "step": 60
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.903328462203299e-05,
      "loss": 3.3835,
      "step": 65
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.255708249541069e-05,
      "loss": 3.0019,
      "step": 70
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.594812927176565e-05,
      "loss": 3.0587,
      "step": 75
    },
    {
      "epoch": 0.5,
      "learning_rate": 6.926928520795311e-05,
      "loss": 3.2402,
      "step": 80
    },
    {
      "epoch": 0.53,
      "learning_rate": 6.258407531907605e-05,
      "loss": 3.0662,
      "step": 85
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.5956085168010344e-05,
      "loss": 3.0189,
      "step": 90
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.9448356079036955e-05,
      "loss": 3.3076,
      "step": 95
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.312278552793369e-05,
      "loss": 3.1185,
      "step": 100
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.703953841164296e-05,
      "loss": 2.9762,
      "step": 105
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.125647479715328e-05,
      "loss": 3.1833,
      "step": 110
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.5828599592490882e-05,
      "loss": 3.0905,
      "step": 115
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.080753937420995e-05,
      "loss": 3.0524,
      "step": 120
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.624105134747227e-05,
      "loss": 2.931,
      "step": 125
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.2172569109183958e-05,
      "loss": 3.0424,
      "step": 130
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.640789534607252e-06,
      "loss": 3.3083,
      "step": 135
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.679304716725937e-06,
      "loss": 3.3548,
      "step": 140
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.3162824591277986e-06,
      "loss": 3.1647,
      "step": 145
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.574198361357997e-06,
      "loss": 3.0154,
      "step": 150
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.696220449804098e-07,
      "loss": 3.0273,
      "step": 155
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.3059553632214649e-08,
      "loss": 3.1636,
      "step": 160
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.0393269062042236,
      "eval_runtime": 8.8221,
      "eval_samples_per_second": 23.351,
      "eval_steps_per_second": 2.947,
      "step": 161
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.633985999863723e-07,
      "loss": 3.0203,
      "step": 165
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.94348327322787e-06,
      "loss": 2.7138,
      "step": 170
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.881847639061565e-06,
      "loss": 3.1889,
      "step": 175
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.459348994545111e-06,
      "loss": 3.1967,
      "step": 180
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.650532712235251e-06,
      "loss": 2.5474,
      "step": 185
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3423883622425528e-05,
      "loss": 3.0815,
      "step": 190
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.7742137247474782e-05,
      "loss": 2.8284,
      "step": 195
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.256264781457008e-05,
      "loss": 2.9571,
      "step": 200
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.783780941254068e-05,
      "loss": 2.8924,
      "step": 205
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.3515526133505226e-05,
      "loss": 3.1057,
      "step": 210
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.953972655637492e-05,
      "loss": 2.7295,
      "step": 215
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.585091749132818e-05,
      "loss": 2.9389,
      "step": 220
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.238677151664272e-05,
      "loss": 2.9111,
      "step": 225
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.908274250554988e-05,
      "loss": 2.9365,
      "step": 230
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.587270306435089e-05,
      "loss": 2.9594,
      "step": 235
    },
    {
      "epoch": 1.52,
      "learning_rate": 7.268959758664963e-05,
      "loss": 3.0584,
      "step": 240
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.946610447433497e-05,
      "loss": 2.8725,
      "step": 245
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.613530098541655e-05,
      "loss": 2.8485,
      "step": 250
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.263132414287828e-05,
      "loss": 3.0395,
      "step": 255
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.889002117761157e-05,
      "loss": 2.9287,
      "step": 260
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.0001048495830818497,
      "loss": 3.1458,
      "step": 265
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.000110451155016322,
      "loss": 2.9484,
      "step": 270
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00011563941754292792,
      "loss": 2.8225,
      "step": 275
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.00012036313294285455,
      "loss": 2.9005,
      "step": 280
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.00012457565122486245,
      "loss": 2.915,
      "step": 285
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.00012823537082655537,
      "loss": 2.7606,
      "step": 290
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.00013130614945888676,
      "loss": 3.1073,
      "step": 295
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.0001337576610365259,
      "loss": 2.7606,
      "step": 300
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.0001355656951691469,
      "loss": 2.9132,
      "step": 305
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.00013671239625595536,
      "loss": 3.0598,
      "step": 310
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0001371864398222265,
      "loss": 2.8386,
      "step": 315
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.6964175701141357,
      "eval_runtime": 2.7786,
      "eval_samples_per_second": 82.056,
      "eval_steps_per_second": 10.437,
      "step": 316
    }
  ],
  "max_steps": 316,
  "num_train_epochs": 2,
  "total_flos": 329750544384000.0,
  "trial_name": null,
  "trial_params": null
}