{
  "best_metric": 0.9966480446927374,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-LongSleeveCleanedData/checkpoint-575",
  "epoch": 4.97020854021847,
  "global_step": 715,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 4.985855728429986e-05,
      "loss": 1.0641,
      "step": 10
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.915134370579915e-05,
      "loss": 0.7318,
      "step": 20
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.844413012729845e-05,
      "loss": 0.4442,
      "step": 30
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.773691654879774e-05,
      "loss": 0.38,
      "step": 40
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.702970297029703e-05,
      "loss": 0.2757,
      "step": 50
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.632248939179633e-05,
      "loss": 0.2418,
      "step": 60
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.561527581329561e-05,
      "loss": 0.2242,
      "step": 70
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.490806223479491e-05,
      "loss": 0.2094,
      "step": 80
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.42008486562942e-05,
      "loss": 0.2175,
      "step": 90
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.3493635077793494e-05,
      "loss": 0.1794,
      "step": 100
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.278642149929279e-05,
      "loss": 0.147,
      "step": 110
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.207920792079208e-05,
      "loss": 0.1538,
      "step": 120
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.1371994342291374e-05,
      "loss": 0.1365,
      "step": 130
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.0664780763790665e-05,
      "loss": 0.1802,
      "step": 140
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.9597765363128492,
      "eval_loss": 0.1150519847869873,
      "eval_runtime": 220.0966,
      "eval_samples_per_second": 4.066,
      "eval_steps_per_second": 0.509,
      "step": 143
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.9957567185289956e-05,
      "loss": 0.1855,
      "step": 150
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.9250353606789254e-05,
      "loss": 0.1416,
      "step": 160
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.8543140028288545e-05,
      "loss": 0.1621,
      "step": 170
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.783592644978784e-05,
      "loss": 0.1769,
      "step": 180
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.712871287128713e-05,
      "loss": 0.1404,
      "step": 190
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.642149929278642e-05,
      "loss": 0.139,
      "step": 200
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.138,
      "step": 210
    },
    {
      "epoch": 1.53,
      "learning_rate": 3.500707213578501e-05,
      "loss": 0.1705,
      "step": 220
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.4299858557284306e-05,
      "loss": 0.1272,
      "step": 230
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.35926449787836e-05,
      "loss": 0.1326,
      "step": 240
    },
    {
      "epoch": 1.74,
      "learning_rate": 3.288543140028288e-05,
      "loss": 0.1268,
      "step": 250
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.217821782178218e-05,
      "loss": 0.1383,
      "step": 260
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.147100424328147e-05,
      "loss": 0.1043,
      "step": 270
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.076379066478077e-05,
      "loss": 0.0836,
      "step": 280
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.994413407821229,
      "eval_loss": 0.020183874294161797,
      "eval_runtime": 22.3499,
      "eval_samples_per_second": 40.045,
      "eval_steps_per_second": 5.011,
      "step": 287
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.0056577086280057e-05,
      "loss": 0.0581,
      "step": 290
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.9349363507779348e-05,
      "loss": 0.1456,
      "step": 300
    },
    {
      "epoch": 2.15,
      "learning_rate": 2.8642149929278646e-05,
      "loss": 0.1157,
      "step": 310
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.7934936350777934e-05,
      "loss": 0.1005,
      "step": 320
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.722772277227723e-05,
      "loss": 0.0868,
      "step": 330
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.6520509193776523e-05,
      "loss": 0.1223,
      "step": 340
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.581329561527581e-05,
      "loss": 0.1164,
      "step": 350
    },
    {
      "epoch": 2.5,
      "learning_rate": 2.510608203677511e-05,
      "loss": 0.081,
      "step": 360
    },
    {
      "epoch": 2.57,
      "learning_rate": 2.43988684582744e-05,
      "loss": 0.076,
      "step": 370
    },
    {
      "epoch": 2.64,
      "learning_rate": 2.369165487977369e-05,
      "loss": 0.0827,
      "step": 380
    },
    {
      "epoch": 2.71,
      "learning_rate": 2.2984441301272985e-05,
      "loss": 0.0734,
      "step": 390
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.227722772277228e-05,
      "loss": 0.1122,
      "step": 400
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.157001414427157e-05,
      "loss": 0.1241,
      "step": 410
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.0862800565770862e-05,
      "loss": 0.1051,
      "step": 420
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.0155586987270157e-05,
      "loss": 0.1186,
      "step": 430
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.994413407821229,
      "eval_loss": 0.01652395911514759,
      "eval_runtime": 23.7611,
      "eval_samples_per_second": 37.667,
      "eval_steps_per_second": 4.714,
      "step": 431
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.9448373408769448e-05,
      "loss": 0.0954,
      "step": 440
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.8741159830268743e-05,
      "loss": 0.066,
      "step": 450
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.8033946251768037e-05,
      "loss": 0.1178,
      "step": 460
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.7326732673267325e-05,
      "loss": 0.091,
      "step": 470
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.661951909476662e-05,
      "loss": 0.0954,
      "step": 480
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.5912305516265914e-05,
      "loss": 0.0845,
      "step": 490
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.5205091937765206e-05,
      "loss": 0.0953,
      "step": 500
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.44978783592645e-05,
      "loss": 0.108,
      "step": 510
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.379066478076379e-05,
      "loss": 0.0578,
      "step": 520
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.3083451202263084e-05,
      "loss": 0.1044,
      "step": 530
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.2376237623762377e-05,
      "loss": 0.0842,
      "step": 540
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.166902404526167e-05,
      "loss": 0.0716,
      "step": 550
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.0961810466760961e-05,
      "loss": 0.0739,
      "step": 560
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.0254596888260256e-05,
      "loss": 0.08,
      "step": 570
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9966480446927374,
      "eval_loss": 0.010998690500855446,
      "eval_runtime": 23.7024,
      "eval_samples_per_second": 37.76,
      "eval_steps_per_second": 4.725,
      "step": 575
    },
    {
      "epoch": 4.03,
      "learning_rate": 9.547383309759547e-06,
      "loss": 0.0684,
      "step": 580
    },
    {
      "epoch": 4.1,
      "learning_rate": 8.84016973125884e-06,
      "loss": 0.0694,
      "step": 590
    },
    {
      "epoch": 4.17,
      "learning_rate": 8.132956152758134e-06,
      "loss": 0.0663,
      "step": 600
    },
    {
      "epoch": 4.24,
      "learning_rate": 7.4257425742574256e-06,
      "loss": 0.0694,
      "step": 610
    },
    {
      "epoch": 4.31,
      "learning_rate": 6.718528995756719e-06,
      "loss": 0.0776,
      "step": 620
    },
    {
      "epoch": 4.38,
      "learning_rate": 6.011315417256011e-06,
      "loss": 0.0785,
      "step": 630
    },
    {
      "epoch": 4.45,
      "learning_rate": 5.304101838755304e-06,
      "loss": 0.0773,
      "step": 640
    },
    {
      "epoch": 4.52,
      "learning_rate": 4.596888260254597e-06,
      "loss": 0.06,
      "step": 650
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.889674681753889e-06,
      "loss": 0.0647,
      "step": 660
    },
    {
      "epoch": 4.66,
      "learning_rate": 3.182461103253183e-06,
      "loss": 0.0821,
      "step": 670
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.4752475247524753e-06,
      "loss": 0.0589,
      "step": 680
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.768033946251768e-06,
      "loss": 0.0604,
      "step": 690
    },
    {
      "epoch": 4.87,
      "learning_rate": 1.0608203677510609e-06,
      "loss": 0.0413,
      "step": 700
    },
    {
      "epoch": 4.94,
      "learning_rate": 3.536067892503536e-07,
      "loss": 0.0575,
      "step": 710
    },
    {
      "epoch": 4.97,
      "eval_accuracy": 0.9955307262569832,
      "eval_loss": 0.012482840567827225,
      "eval_runtime": 24.0149,
      "eval_samples_per_second": 37.269,
      "eval_steps_per_second": 4.664,
      "step": 715
    },
    {
      "epoch": 4.97,
      "step": 715,
      "total_flos": 9.948639012864492e+17,
      "train_loss": 0.1435624466075764,
      "train_runtime": 3458.6134,
      "train_samples_per_second": 11.641,
      "train_steps_per_second": 0.207
    }
  ],
  "max_steps": 715,
  "num_train_epochs": 5,
  "total_flos": 9.948639012864492e+17,
  "trial_name": null,
  "trial_params": null
}