{
  "best_metric": 0.6668800115585327,
  "best_model_checkpoint": "./vit-base-age-classification/checkpoint-770",
  "epoch": 2.0,
  "eval_steps": 100,
  "global_step": 770,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 0.00019740259740259742,
      "loss": 1.678,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001948051948051948,
      "loss": 1.5725,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019220779220779222,
      "loss": 1.4755,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00018961038961038963,
      "loss": 1.4616,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000187012987012987,
      "loss": 1.4189,
      "step": 50
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00018441558441558442,
      "loss": 1.4155,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018181818181818183,
      "loss": 1.3158,
      "step": 70
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00017922077922077922,
      "loss": 1.5087,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017662337662337663,
      "loss": 1.3685,
      "step": 90
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017402597402597401,
      "loss": 1.3054,
      "step": 100
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00017142857142857143,
      "loss": 1.3126,
      "step": 110
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00016883116883116884,
      "loss": 1.3627,
      "step": 120
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00016623376623376625,
      "loss": 1.2071,
      "step": 130
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00016363636363636366,
      "loss": 1.2976,
      "step": 140
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00016103896103896104,
      "loss": 1.2975,
      "step": 150
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00015844155844155845,
      "loss": 1.3719,
      "step": 160
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00015584415584415587,
      "loss": 1.3048,
      "step": 170
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00015324675324675325,
      "loss": 1.1897,
      "step": 180
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00015064935064935066,
      "loss": 1.114,
      "step": 190
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00014805194805194807,
      "loss": 1.2517,
      "step": 200
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00014545454545454546,
      "loss": 1.1583,
      "step": 210
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00014285714285714287,
      "loss": 1.2316,
      "step": 220
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00014025974025974028,
      "loss": 1.2343,
      "step": 230
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00013766233766233766,
      "loss": 1.2562,
      "step": 240
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00013506493506493507,
      "loss": 1.2292,
      "step": 250
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00013246753246753249,
      "loss": 1.1743,
      "step": 260
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00012987012987012987,
      "loss": 1.2493,
      "step": 270
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00012727272727272728,
      "loss": 1.1411,
      "step": 280
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00012467532467532467,
      "loss": 1.1051,
      "step": 290
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00012207792207792208,
      "loss": 1.0832,
      "step": 300
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00011948051948051949,
      "loss": 1.1295,
      "step": 310
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00011688311688311689,
      "loss": 1.1054,
      "step": 320
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00011428571428571428,
      "loss": 1.1406,
      "step": 330
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00011168831168831168,
      "loss": 1.1486,
      "step": 340
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00010909090909090909,
      "loss": 1.1239,
      "step": 350
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00010649350649350649,
      "loss": 1.0806,
      "step": 360
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00010389610389610389,
      "loss": 1.1452,
      "step": 370
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0001012987012987013,
      "loss": 1.1069,
      "step": 380
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.620910788213329,
      "eval_loss": 0.9425380825996399,
      "eval_runtime": 112.3377,
      "eval_samples_per_second": 109.66,
      "eval_steps_per_second": 13.709,
      "step": 385
    },
    {
      "epoch": 1.01,
      "learning_rate": 9.870129870129871e-05,
      "loss": 0.944,
      "step": 390
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.610389610389611e-05,
      "loss": 0.9775,
      "step": 400
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.35064935064935e-05,
      "loss": 0.9443,
      "step": 410
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.090909090909092e-05,
      "loss": 0.8547,
      "step": 420
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.831168831168831e-05,
      "loss": 0.8672,
      "step": 430
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.8914,
      "step": 440
    },
    {
      "epoch": 1.17,
      "learning_rate": 8.311688311688312e-05,
      "loss": 0.9796,
      "step": 450
    },
    {
      "epoch": 1.19,
      "learning_rate": 8.051948051948052e-05,
      "loss": 0.8171,
      "step": 460
    },
    {
      "epoch": 1.22,
      "learning_rate": 7.792207792207793e-05,
      "loss": 0.9133,
      "step": 470
    },
    {
      "epoch": 1.25,
      "learning_rate": 7.532467532467533e-05,
      "loss": 0.8922,
      "step": 480
    },
    {
      "epoch": 1.27,
      "learning_rate": 7.272727272727273e-05,
      "loss": 0.9447,
      "step": 490
    },
    {
      "epoch": 1.3,
      "learning_rate": 7.012987012987014e-05,
      "loss": 0.9278,
      "step": 500
    },
    {
      "epoch": 1.32,
      "learning_rate": 6.753246753246754e-05,
      "loss": 0.9201,
      "step": 510
    },
    {
      "epoch": 1.35,
      "learning_rate": 6.493506493506494e-05,
      "loss": 0.9333,
      "step": 520
    },
    {
      "epoch": 1.38,
      "learning_rate": 6.233766233766233e-05,
      "loss": 0.9163,
      "step": 530
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.9740259740259744e-05,
      "loss": 0.8924,
      "step": 540
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.9369,
      "step": 550
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.4545454545454546e-05,
      "loss": 0.8954,
      "step": 560
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.1948051948051944e-05,
      "loss": 0.8396,
      "step": 570
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.9350649350649355e-05,
      "loss": 0.8785,
      "step": 580
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.675324675324675e-05,
      "loss": 0.8982,
      "step": 590
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.415584415584416e-05,
      "loss": 0.8245,
      "step": 600
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.155844155844156e-05,
      "loss": 0.8395,
      "step": 610
    },
    {
      "epoch": 1.61,
      "learning_rate": 3.8961038961038966e-05,
      "loss": 0.8432,
      "step": 620
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 0.8502,
      "step": 630
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.376623376623377e-05,
      "loss": 0.841,
      "step": 640
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.1168831168831166e-05,
      "loss": 0.8699,
      "step": 650
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.8441,
      "step": 660
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.5974025974025972e-05,
      "loss": 0.8556,
      "step": 670
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.3376623376623376e-05,
      "loss": 0.8814,
      "step": 680
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.077922077922078e-05,
      "loss": 0.96,
      "step": 690
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.8597,
      "step": 700
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.5584415584415583e-05,
      "loss": 0.9041,
      "step": 710
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.2987012987012986e-05,
      "loss": 0.832,
      "step": 720
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.038961038961039e-05,
      "loss": 0.8599,
      "step": 730
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.792207792207792e-06,
      "loss": 0.7605,
      "step": 740
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.194805194805195e-06,
      "loss": 0.8849,
      "step": 750
    },
    {
      "epoch": 1.97,
      "learning_rate": 2.5974025974025976e-06,
      "loss": 0.7527,
      "step": 760
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0,
      "loss": 0.8465,
      "step": 770
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7656465622209595,
      "eval_loss": 0.6668800115585327,
      "eval_runtime": 111.2647,
      "eval_samples_per_second": 110.718,
      "eval_steps_per_second": 13.841,
      "step": 770
    },
    {
      "epoch": 2.0,
      "step": 770,
      "total_flos": 1.9093674240032072e+18,
      "train_loss": 1.0707446556586724,
      "train_runtime": 668.48,
      "train_samples_per_second": 36.857,
      "train_steps_per_second": 1.152
    }
  ],
  "logging_steps": 10,
  "max_steps": 770,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 1.9093674240032072e+18,
  "trial_name": null,
  "trial_params": null
}