{
  "best_metric": 0.6505,
  "best_model_checkpoint": "huggingface_swinv2/results/checkpoint-474",
  "epoch": 3.0,
  "global_step": 474,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 6.9591,
      "step": 10
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 6.2275,
      "step": 20
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.125e-05,
      "loss": 5.4598,
      "step": 30
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.166666666666667e-05,
      "loss": 5.0493,
      "step": 40
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.976525821596245e-05,
      "loss": 4.441,
      "step": 50
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8591549295774653e-05,
      "loss": 4.2344,
      "step": 60
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.741784037558686e-05,
      "loss": 3.9691,
      "step": 70
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.624413145539906e-05,
      "loss": 3.7754,
      "step": 80
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.507042253521127e-05,
      "loss": 3.5397,
      "step": 90
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.389671361502348e-05,
      "loss": 3.6703,
      "step": 100
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.2723004694835684e-05,
      "loss": 3.3699,
      "step": 110
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.154929577464789e-05,
      "loss": 3.1505,
      "step": 120
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.037558685446009e-05,
      "loss": 3.163,
      "step": 130
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.9201877934272305e-05,
      "loss": 2.9571,
      "step": 140
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.802816901408451e-05,
      "loss": 2.8495,
      "step": 150
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.5525,
      "eval_loss": 2.3313448429107666,
      "eval_runtime": 296.5387,
      "eval_samples_per_second": 6.744,
      "eval_steps_per_second": 0.212,
      "step": 158
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.6854460093896714e-05,
      "loss": 2.7472,
      "step": 160
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.568075117370892e-05,
      "loss": 2.2962,
      "step": 170
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.450704225352113e-05,
      "loss": 2.3374,
      "step": 180
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.3174,
      "step": 190
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.215962441314554e-05,
      "loss": 2.3778,
      "step": 200
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.0985915492957744e-05,
      "loss": 2.1876,
      "step": 210
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.9812206572769952e-05,
      "loss": 2.2015,
      "step": 220
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.8638497652582164e-05,
      "loss": 2.11,
      "step": 230
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.746478873239437e-05,
      "loss": 2.0703,
      "step": 240
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.6291079812206577e-05,
      "loss": 2.0761,
      "step": 250
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.511737089201878e-05,
      "loss": 1.925,
      "step": 260
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.3943661971830986e-05,
      "loss": 1.9786,
      "step": 270
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.2769953051643194e-05,
      "loss": 2.0874,
      "step": 280
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.1596244131455402e-05,
      "loss": 1.9243,
      "step": 290
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.0422535211267607e-05,
      "loss": 1.907,
      "step": 300
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.9248826291079812e-05,
      "loss": 1.8825,
      "step": 310
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.623,
      "eval_loss": 1.6814827919006348,
      "eval_runtime": 239.2713,
      "eval_samples_per_second": 8.359,
      "eval_steps_per_second": 0.263,
      "step": 316
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.807511737089202e-05,
      "loss": 1.8254,
      "step": 320
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.6901408450704224e-05,
      "loss": 1.7649,
      "step": 330
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.5727699530516433e-05,
      "loss": 1.5449,
      "step": 340
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.4553990610328639e-05,
      "loss": 1.5033,
      "step": 350
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.3380281690140845e-05,
      "loss": 1.7012,
      "step": 360
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.2206572769953052e-05,
      "loss": 1.6202,
      "step": 370
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.1032863849765258e-05,
      "loss": 1.6755,
      "step": 380
    },
    {
      "epoch": 2.47,
      "learning_rate": 9.859154929577465e-06,
      "loss": 1.6227,
      "step": 390
    },
    {
      "epoch": 2.53,
      "learning_rate": 8.685446009389673e-06,
      "loss": 1.5438,
      "step": 400
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.511737089201878e-06,
      "loss": 1.5294,
      "step": 410
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.338028169014085e-06,
      "loss": 1.5591,
      "step": 420
    },
    {
      "epoch": 2.72,
      "learning_rate": 5.164319248826292e-06,
      "loss": 1.5766,
      "step": 430
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.990610328638498e-06,
      "loss": 1.5316,
      "step": 440
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.8169014084507042e-06,
      "loss": 1.6038,
      "step": 450
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.643192488262911e-06,
      "loss": 1.4692,
      "step": 460
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.694835680751174e-07,
      "loss": 1.4956,
      "step": 470
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.6505,
      "eval_loss": 1.5247734785079956,
      "eval_runtime": 211.3118,
      "eval_samples_per_second": 9.465,
      "eval_steps_per_second": 0.298,
      "step": 474
    },
    {
      "epoch": 3.0,
      "step": 474,
      "total_flos": 7.918476182904914e+18,
      "train_loss": 2.6032654142580958,
      "train_runtime": 22914.0929,
      "train_samples_per_second": 2.646,
      "train_steps_per_second": 0.021
    }
  ],
  "max_steps": 474,
  "num_train_epochs": 3,
  "total_flos": 7.918476182904914e+18,
  "trial_name": null,
  "trial_params": null
}