{
  "best_metric": 0.9796511627906976,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-blank_img/checkpoint-288",
  "epoch": 4.989690721649485,
  "global_step": 360,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.7256,
      "step": 5
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.6125,
      "step": 10
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 0.4648,
      "step": 15
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.3041,
      "step": 20
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.472222222222222e-05,
      "loss": 0.2065,
      "step": 25
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.1469,
      "step": 30
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 0.1734,
      "step": 35
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.938271604938271e-05,
      "loss": 0.1736,
      "step": 40
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 0.1559,
      "step": 45
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.783950617283951e-05,
      "loss": 0.1355,
      "step": 50
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.70679012345679e-05,
      "loss": 0.0896,
      "step": 55
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.0994,
      "step": 60
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.5524691358024696e-05,
      "loss": 0.127,
      "step": 65
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.4753086419753084e-05,
      "loss": 0.1329,
      "step": 70
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.9767441860465116,
      "eval_loss": 0.08817464113235474,
      "eval_runtime": 7.3528,
      "eval_samples_per_second": 140.354,
      "eval_steps_per_second": 4.488,
      "step": 72
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.3981481481481486e-05,
      "loss": 0.1512,
      "step": 75
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.3209876543209875e-05,
      "loss": 0.1248,
      "step": 80
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.243827160493827e-05,
      "loss": 0.1396,
      "step": 85
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.1231,
      "step": 90
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.089506172839506e-05,
      "loss": 0.1228,
      "step": 95
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.012345679012346e-05,
      "loss": 0.1277,
      "step": 100
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.935185185185186e-05,
      "loss": 0.1116,
      "step": 105
    },
    {
      "epoch": 1.52,
      "learning_rate": 3.8580246913580246e-05,
      "loss": 0.1236,
      "step": 110
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.780864197530865e-05,
      "loss": 0.1254,
      "step": 115
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.1004,
      "step": 120
    },
    {
      "epoch": 1.73,
      "learning_rate": 3.626543209876543e-05,
      "loss": 0.1199,
      "step": 125
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.5493827160493834e-05,
      "loss": 0.1098,
      "step": 130
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.472222222222222e-05,
      "loss": 0.0787,
      "step": 135
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.395061728395062e-05,
      "loss": 0.1247,
      "step": 140
    },
    {
      "epoch": 1.99,
      "eval_accuracy": 0.9767441860465116,
      "eval_loss": 0.08047417551279068,
      "eval_runtime": 7.6368,
      "eval_samples_per_second": 135.135,
      "eval_steps_per_second": 4.321,
      "step": 144
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.317901234567901e-05,
      "loss": 0.1116,
      "step": 145
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.240740740740741e-05,
      "loss": 0.122,
      "step": 150
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.16358024691358e-05,
      "loss": 0.1109,
      "step": 155
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.08641975308642e-05,
      "loss": 0.109,
      "step": 160
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.0092592592592593e-05,
      "loss": 0.104,
      "step": 165
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.9320987654320992e-05,
      "loss": 0.0849,
      "step": 170
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.8549382716049384e-05,
      "loss": 0.1088,
      "step": 175
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.0988,
      "step": 180
    },
    {
      "epoch": 2.56,
      "learning_rate": 2.700617283950617e-05,
      "loss": 0.1072,
      "step": 185
    },
    {
      "epoch": 2.63,
      "learning_rate": 2.623456790123457e-05,
      "loss": 0.1098,
      "step": 190
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.5462962962962965e-05,
      "loss": 0.1288,
      "step": 195
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.4691358024691357e-05,
      "loss": 0.1401,
      "step": 200
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.3919753086419755e-05,
      "loss": 0.1237,
      "step": 205
    },
    {
      "epoch": 2.91,
      "learning_rate": 2.314814814814815e-05,
      "loss": 0.1063,
      "step": 210
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.2376543209876542e-05,
      "loss": 0.0742,
      "step": 215
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.9767441860465116,
      "eval_loss": 0.07210300117731094,
      "eval_runtime": 7.3216,
      "eval_samples_per_second": 140.953,
      "eval_steps_per_second": 4.507,
      "step": 216
    },
    {
      "epoch": 3.05,
      "learning_rate": 2.1604938271604937e-05,
      "loss": 0.1008,
      "step": 220
    },
    {
      "epoch": 3.12,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 0.1196,
      "step": 225
    },
    {
      "epoch": 3.19,
      "learning_rate": 2.006172839506173e-05,
      "loss": 0.0712,
      "step": 230
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.9290123456790123e-05,
      "loss": 0.0726,
      "step": 235
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.0964,
      "step": 240
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.7746913580246917e-05,
      "loss": 0.0932,
      "step": 245
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.697530864197531e-05,
      "loss": 0.1022,
      "step": 250
    },
    {
      "epoch": 3.54,
      "learning_rate": 1.6203703703703704e-05,
      "loss": 0.1033,
      "step": 255
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.54320987654321e-05,
      "loss": 0.0913,
      "step": 260
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.4660493827160496e-05,
      "loss": 0.0873,
      "step": 265
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.1095,
      "step": 270
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.3117283950617285e-05,
      "loss": 0.1047,
      "step": 275
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.2345679012345678e-05,
      "loss": 0.0707,
      "step": 280
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.1574074074074075e-05,
      "loss": 0.0745,
      "step": 285
    },
    {
      "epoch": 3.99,
      "eval_accuracy": 0.9796511627906976,
      "eval_loss": 0.07256749272346497,
      "eval_runtime": 6.7881,
      "eval_samples_per_second": 152.03,
      "eval_steps_per_second": 4.861,
      "step": 288
    },
    {
      "epoch": 4.03,
      "learning_rate": 1.0802469135802469e-05,
      "loss": 0.129,
      "step": 290
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.0030864197530866e-05,
      "loss": 0.1298,
      "step": 295
    },
    {
      "epoch": 4.16,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.089,
      "step": 300
    },
    {
      "epoch": 4.23,
      "learning_rate": 8.487654320987654e-06,
      "loss": 0.0907,
      "step": 305
    },
    {
      "epoch": 4.3,
      "learning_rate": 7.71604938271605e-06,
      "loss": 0.0983,
      "step": 310
    },
    {
      "epoch": 4.37,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.0769,
      "step": 315
    },
    {
      "epoch": 4.44,
      "learning_rate": 6.172839506172839e-06,
      "loss": 0.1115,
      "step": 320
    },
    {
      "epoch": 4.51,
      "learning_rate": 5.401234567901234e-06,
      "loss": 0.058,
      "step": 325
    },
    {
      "epoch": 4.58,
      "learning_rate": 4.6296296296296296e-06,
      "loss": 0.0972,
      "step": 330
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.858024691358025e-06,
      "loss": 0.1036,
      "step": 335
    },
    {
      "epoch": 4.71,
      "learning_rate": 3.0864197530864196e-06,
      "loss": 0.0675,
      "step": 340
    },
    {
      "epoch": 4.78,
      "learning_rate": 2.3148148148148148e-06,
      "loss": 0.0909,
      "step": 345
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.5432098765432098e-06,
      "loss": 0.1148,
      "step": 350
    },
    {
      "epoch": 4.92,
      "learning_rate": 7.716049382716049e-07,
      "loss": 0.0802,
      "step": 355
    },
    {
      "epoch": 4.99,
      "learning_rate": 0.0,
      "loss": 0.1289,
      "step": 360
    },
    {
      "epoch": 4.99,
      "eval_accuracy": 0.9728682170542635,
      "eval_loss": 0.08483530580997467,
      "eval_runtime": 6.8918,
      "eval_samples_per_second": 149.743,
      "eval_steps_per_second": 4.788,
      "step": 360
    },
    {
      "epoch": 4.99,
      "step": 360,
      "total_flos": 1.151826529604567e+18,
      "train_loss": 0.13382491601838006,
      "train_runtime": 783.9355,
      "train_samples_per_second": 59.195,
      "train_steps_per_second": 0.459
    }
  ],
  "max_steps": 360,
  "num_train_epochs": 5,
  "total_flos": 1.151826529604567e+18,
  "trial_name": null,
  "trial_params": null
}