{
  "best_metric": 1.536595344543457,
  "best_model_checkpoint": "output/face/checkpoint-402",
  "epoch": 3.0,
  "global_step": 402,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 0.00013672921035335123,
      "loss": 2.5516,
      "step": 5
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00013532330330528217,
      "loss": 2.1537,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00013300157583778652,
      "loss": 2.0468,
      "step": 15
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00012979589515943672,
      "loss": 2.0981,
      "step": 20
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00012575026130737067,
      "loss": 1.9179,
      "step": 25
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001209202032183722,
      "loss": 2.1116,
      "step": 30
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00011537201655836005,
      "loss": 1.9485,
      "step": 35
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00010918185377156764,
      "loss": 1.8987,
      "step": 40
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00010243467883907644,
      "loss": 1.9255,
      "step": 45
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.522310109331633e-05,
      "loss": 1.9168,
      "step": 50
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.764610409518253e-05,
      "loss": 1.8695,
      "step": 55
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.980768702075116e-05,
      "loss": 1.8724,
      "step": 60
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.181543720543789e-05,
      "loss": 1.854,
      "step": 65
    },
    {
      "epoch": 0.52,
      "learning_rate": 6.377905343835293e-05,
      "loss": 1.833,
      "step": 70
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.580884027559068e-05,
      "loss": 1.8861,
      "step": 75
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.8014194038976637e-05,
      "loss": 1.9461,
      "step": 80
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.0502101280915654e-05,
      "loss": 1.8331,
      "step": 85
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.337567032488183e-05,
      "loss": 1.8838,
      "step": 90
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.673271603710427e-05,
      "loss": 1.8553,
      "step": 95
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.0664417254371463e-05,
      "loss": 1.7774,
      "step": 100
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.5254065295624607e-05,
      "loss": 1.7757,
      "step": 105
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.0575920734825916e-05,
      "loss": 1.7125,
      "step": 110
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.6941941266328415e-06,
      "loss": 1.848,
      "step": 115
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.66216467507622e-06,
      "loss": 1.8553,
      "step": 120
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.5214489420839643e-06,
      "loss": 1.9048,
      "step": 125
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.014296332977907e-07,
      "loss": 1.587,
      "step": 130
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.7584949731826782,
      "eval_runtime": 8.5759,
      "eval_samples_per_second": 22.738,
      "eval_steps_per_second": 2.915,
      "step": 134
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.885230146662962e-08,
      "loss": 1.6643,
      "step": 135
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.775955029229377e-07,
      "loss": 1.839,
      "step": 140
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.268617561892799e-06,
      "loss": 1.7973,
      "step": 145
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.7700806733338495e-06,
      "loss": 1.6847,
      "step": 150
    },
    {
      "epoch": 1.16,
      "learning_rate": 8.147650640909638e-06,
      "loss": 1.9368,
      "step": 155
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.235496813600306e-05,
      "loss": 1.7733,
      "step": 160
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.7334285009434683e-05,
      "loss": 1.7367,
      "step": 165
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.3017256922096845e-05,
      "loss": 1.856,
      "step": 170
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.932588141513589e-05,
      "loss": 1.8208,
      "step": 175
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.6173568544064456e-05,
      "loss": 1.6543,
      "step": 180
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.3466329381658294e-05,
      "loss": 1.7295,
      "step": 185
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.110406607666898e-05,
      "loss": 1.7178,
      "step": 190
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.898194576146263e-05,
      "loss": 1.7418,
      "step": 195
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.699183945081632e-05,
      "loss": 1.6734,
      "step": 200
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.502380618205647e-05,
      "loss": 1.7512,
      "step": 205
    },
    {
      "epoch": 1.57,
      "learning_rate": 8.296760202575751e-05,
      "loss": 1.7607,
      "step": 210
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.071419325485517e-05,
      "loss": 1.6486,
      "step": 215
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.815725290295059e-05,
      "loss": 1.7547,
      "step": 220
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.00010519462017057694,
      "loss": 1.773,
      "step": 225
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.00011172970264813468,
      "loss": 1.7159,
      "step": 230
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.00011767280210908074,
      "loss": 1.6116,
      "step": 235
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00012294234567600346,
      "loss": 1.6329,
      "step": 240
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.0001274660054610349,
      "loss": 1.6289,
      "step": 245
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.00013118169131281192,
      "loss": 1.7835,
      "step": 250
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.00013403840304389384,
      "loss": 1.7385,
      "step": 255
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.00013599693044126453,
      "loss": 1.6406,
      "step": 260
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.0001370303914518231,
      "loss": 1.7128,
      "step": 265
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.7059965133666992,
      "eval_runtime": 8.588,
      "eval_samples_per_second": 22.706,
      "eval_steps_per_second": 2.911,
      "step": 268
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0001371246011559198,
      "loss": 1.731,
      "step": 270
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.0001362782664645371,
      "loss": 1.6293,
      "step": 275
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.00013450300386777127,
      "loss": 1.5935,
      "step": 280
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.00013182317999100535,
      "loss": 1.5613,
      "step": 285
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.00012827557714724304,
      "loss": 1.8451,
      "step": 290
    },
    {
      "epoch": 2.2,
      "learning_rate": 0.000123908888476113,
      "loss": 1.7444,
      "step": 295
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.00011878304959908774,
      "loss": 1.6749,
      "step": 300
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.00011296841596437982,
      "loss": 1.5253,
      "step": 305
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.00010654479717298873,
      "loss": 1.6357,
      "step": 310
    },
    {
      "epoch": 2.35,
      "learning_rate": 9.96003615403955e-05,
      "loss": 1.6636,
      "step": 315
    },
    {
      "epoch": 2.39,
      "learning_rate": 9.223042592950526e-05,
      "loss": 1.646,
      "step": 320
    },
    {
      "epoch": 2.43,
      "learning_rate": 8.453614746515988e-05,
      "loss": 1.7026,
      "step": 325
    },
    {
      "epoch": 2.46,
      "learning_rate": 7.662313508728495e-05,
      "loss": 1.6289,
      "step": 330
    },
    {
      "epoch": 2.5,
      "learning_rate": 6.860000000000001e-05,
      "loss": 1.4981,
      "step": 335
    },
    {
      "epoch": 2.54,
      "learning_rate": 6.0576864912715095e-05,
      "loss": 1.5564,
      "step": 340
    },
    {
      "epoch": 2.57,
      "learning_rate": 5.266385253484016e-05,
      "loss": 1.6555,
      "step": 345
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.496957407049471e-05,
      "loss": 1.5796,
      "step": 350
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.759963845960453e-05,
      "loss": 1.7131,
      "step": 355
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.065520282701126e-05,
      "loss": 1.61,
      "step": 360
    },
    {
      "epoch": 2.72,
      "learning_rate": 2.423158403562016e-05,
      "loss": 1.6306,
      "step": 365
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.8416950400912332e-05,
      "loss": 1.5807,
      "step": 370
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.3291111523887019e-05,
      "loss": 1.5766,
      "step": 375
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.924422852757e-06,
      "loss": 1.6478,
      "step": 380
    },
    {
      "epoch": 2.87,
      "learning_rate": 5.376820008994686e-06,
      "loss": 1.6091,
      "step": 385
    },
    {
      "epoch": 2.91,
      "learning_rate": 2.6969961322287634e-06,
      "loss": 1.512,
      "step": 390
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.21733535462923e-07,
      "loss": 1.57,
      "step": 395
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.53988440801922e-08,
      "loss": 1.5282,
      "step": 400
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.536595344543457,
      "eval_runtime": 8.6245,
      "eval_samples_per_second": 22.726,
      "eval_steps_per_second": 2.899,
      "step": 402
    }
  ],
  "max_steps": 402,
  "num_train_epochs": 3,
  "total_flos": 418067251200000.0,
  "trial_name": null,
  "trial_params": null
}