{
  "best_metric": 1.207701563835144,
  "best_model_checkpoint": "outputs/checkpoint-400",
  "epoch": 0.128,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1e-05,
      "loss": 1.8184,
      "step": 5
    },
    {
      "epoch": 0.0,
      "learning_rate": 2e-05,
      "loss": 1.7858,
      "step": 10
    },
    {
      "epoch": 0.0,
      "learning_rate": 3e-05,
      "loss": 1.7218,
      "step": 15
    },
    {
      "epoch": 0.01,
      "learning_rate": 4e-05,
      "loss": 1.7218,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 5e-05,
      "loss": 1.7009,
      "step": 25
    },
    {
      "epoch": 0.01,
      "learning_rate": 6e-05,
      "loss": 1.6714,
      "step": 30
    },
    {
      "epoch": 0.01,
      "learning_rate": 7e-05,
      "loss": 1.6582,
      "step": 35
    },
    {
      "epoch": 0.01,
      "learning_rate": 8e-05,
      "loss": 1.5659,
      "step": 40
    },
    {
      "epoch": 0.01,
      "learning_rate": 9e-05,
      "loss": 1.5343,
      "step": 45
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001,
      "loss": 1.5252,
      "step": 50
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00011000000000000002,
      "loss": 1.414,
      "step": 55
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00012,
      "loss": 1.3919,
      "step": 60
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00013000000000000002,
      "loss": 1.352,
      "step": 65
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00014,
      "loss": 1.3762,
      "step": 70
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.3238,
      "step": 75
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00016,
      "loss": 1.3306,
      "step": 80
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00017,
      "loss": 1.3423,
      "step": 85
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00018,
      "loss": 1.3199,
      "step": 90
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019,
      "loss": 1.3506,
      "step": 95
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 1.3315,
      "step": 100
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001998921832884097,
      "loss": 1.3142,
      "step": 105
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019978436657681943,
      "loss": 1.2994,
      "step": 110
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019967654986522912,
      "loss": 1.2972,
      "step": 115
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019956873315363883,
      "loss": 1.3135,
      "step": 120
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019946091644204851,
      "loss": 1.2903,
      "step": 125
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019935309973045823,
      "loss": 1.2783,
      "step": 130
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019924528301886794,
      "loss": 1.3044,
      "step": 135
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019913746630727762,
      "loss": 1.2947,
      "step": 140
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019902964959568736,
      "loss": 1.2759,
      "step": 145
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019892183288409705,
      "loss": 1.2746,
      "step": 150
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019881401617250676,
      "loss": 1.2869,
      "step": 155
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019870619946091644,
      "loss": 1.26,
      "step": 160
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019859838274932616,
      "loss": 1.258,
      "step": 165
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019849056603773587,
      "loss": 1.2649,
      "step": 170
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019838274932614555,
      "loss": 1.2549,
      "step": 175
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019827493261455526,
      "loss": 1.2859,
      "step": 180
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019816711590296498,
      "loss": 1.2934,
      "step": 185
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019805929919137466,
      "loss": 1.3011,
      "step": 190
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019795148247978437,
      "loss": 1.2304,
      "step": 195
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019784366576819408,
      "loss": 1.2849,
      "step": 200
    },
    {
      "epoch": 0.06,
      "eval_loss": 1.2345943450927734,
      "eval_runtime": 710.2534,
      "eval_samples_per_second": 7.04,
      "eval_steps_per_second": 0.88,
      "step": 200
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001977358490566038,
      "loss": 1.2726,
      "step": 205
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019762803234501348,
      "loss": 1.2996,
      "step": 210
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001975202156334232,
      "loss": 1.2456,
      "step": 215
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001974123989218329,
      "loss": 1.2794,
      "step": 220
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001973045822102426,
      "loss": 1.2637,
      "step": 225
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001971967654986523,
      "loss": 1.252,
      "step": 230
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.000197088948787062,
      "loss": 1.2492,
      "step": 235
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001969811320754717,
      "loss": 1.2972,
      "step": 240
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001968733153638814,
      "loss": 1.3023,
      "step": 245
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019676549865229112,
      "loss": 1.2682,
      "step": 250
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019665768194070083,
      "loss": 1.2981,
      "step": 255
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019654986522911052,
      "loss": 1.3084,
      "step": 260
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019644204851752023,
      "loss": 1.2666,
      "step": 265
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019633423180592994,
      "loss": 1.2623,
      "step": 270
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019622641509433963,
      "loss": 1.2406,
      "step": 275
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019611859838274934,
      "loss": 1.2623,
      "step": 280
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019601078167115905,
      "loss": 1.2562,
      "step": 285
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019590296495956873,
      "loss": 1.2166,
      "step": 290
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019579514824797845,
      "loss": 1.2931,
      "step": 295
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019568733153638813,
      "loss": 1.2354,
      "step": 300
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019557951482479787,
      "loss": 1.2246,
      "step": 305
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019547169811320755,
      "loss": 1.2236,
      "step": 310
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019536388140161727,
      "loss": 1.2442,
      "step": 315
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019525606469002698,
      "loss": 1.2779,
      "step": 320
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019514824797843666,
      "loss": 1.2285,
      "step": 325
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019504043126684637,
      "loss": 1.2157,
      "step": 330
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019493261455525606,
      "loss": 1.2405,
      "step": 335
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001948247978436658,
      "loss": 1.2335,
      "step": 340
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019471698113207548,
      "loss": 1.2401,
      "step": 345
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019460916442048517,
      "loss": 1.2521,
      "step": 350
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001945013477088949,
      "loss": 1.2386,
      "step": 355
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001943935309973046,
      "loss": 1.2196,
      "step": 360
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001942857142857143,
      "loss": 1.192,
      "step": 365
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.000194177897574124,
      "loss": 1.1964,
      "step": 370
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001940700808625337,
      "loss": 1.2876,
      "step": 375
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001939622641509434,
      "loss": 1.2622,
      "step": 380
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001938544474393531,
      "loss": 1.2183,
      "step": 385
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019374663072776284,
      "loss": 1.206,
      "step": 390
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019363881401617252,
      "loss": 1.2697,
      "step": 395
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001935309973045822,
      "loss": 1.2515,
      "step": 400
    },
    {
      "epoch": 0.13,
      "eval_loss": 1.207701563835144,
      "eval_runtime": 709.7794,
      "eval_samples_per_second": 7.044,
      "eval_steps_per_second": 0.881,
      "step": 400
    }
  ],
  "max_steps": 9375,
  "num_train_epochs": 3,
  "total_flos": 2.38239419990016e+16,
  "trial_name": null,
  "trial_params": null
}