{
  "best_metric": 0.7593896713615024,
  "best_model_checkpoint": "vit-base-patch16-224-type/checkpoint-560",
  "epoch": 9.959839357429718,
  "eval_steps": 500,
  "global_step": 620,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.16,
      "learning_rate": 8.064516129032258e-06,
      "loss": 2.3882,
      "step": 10
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 2.2523,
      "step": 20
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.4193548387096777e-05,
      "loss": 2.0603,
      "step": 30
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 1.8355,
      "step": 40
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.032258064516129e-05,
      "loss": 1.5709,
      "step": 50
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 1.3494,
      "step": 60
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6349765258215962,
      "eval_loss": 1.1001132726669312,
      "eval_runtime": 22.8557,
      "eval_samples_per_second": 74.555,
      "eval_steps_per_second": 2.363,
      "step": 62
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.92831541218638e-05,
      "loss": 1.1904,
      "step": 70
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 1.139,
      "step": 80
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.74910394265233e-05,
      "loss": 1.0743,
      "step": 90
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.659498207885305e-05,
      "loss": 1.1071,
      "step": 100
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.56989247311828e-05,
      "loss": 1.0113,
      "step": 110
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.4802867383512545e-05,
      "loss": 0.9612,
      "step": 120
    },
    {
      "epoch": 1.99,
      "eval_accuracy": 0.6772300469483568,
      "eval_loss": 0.8708141446113586,
      "eval_runtime": 22.7556,
      "eval_samples_per_second": 74.883,
      "eval_steps_per_second": 2.373,
      "step": 124
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.390681003584229e-05,
      "loss": 0.9621,
      "step": 130
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.301075268817205e-05,
      "loss": 0.9291,
      "step": 140
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2114695340501795e-05,
      "loss": 0.8999,
      "step": 150
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.121863799283154e-05,
      "loss": 0.9236,
      "step": 160
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.032258064516129e-05,
      "loss": 0.9358,
      "step": 170
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.9426523297491045e-05,
      "loss": 0.8817,
      "step": 180
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.7265258215962441,
      "eval_loss": 0.7897992730140686,
      "eval_runtime": 23.0974,
      "eval_samples_per_second": 73.774,
      "eval_steps_per_second": 2.338,
      "step": 186
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.8530465949820786e-05,
      "loss": 0.9056,
      "step": 190
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.763440860215054e-05,
      "loss": 0.8122,
      "step": 200
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.673835125448029e-05,
      "loss": 0.7853,
      "step": 210
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.5842293906810036e-05,
      "loss": 0.8539,
      "step": 220
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.494623655913979e-05,
      "loss": 0.7822,
      "step": 230
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.405017921146954e-05,
      "loss": 0.8362,
      "step": 240
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.7276995305164319,
      "eval_loss": 0.7642938494682312,
      "eval_runtime": 22.8716,
      "eval_samples_per_second": 74.503,
      "eval_steps_per_second": 2.361,
      "step": 249
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.3154121863799286e-05,
      "loss": 0.806,
      "step": 250
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.7131,
      "step": 260
    },
    {
      "epoch": 4.34,
      "learning_rate": 3.136200716845878e-05,
      "loss": 0.7659,
      "step": 270
    },
    {
      "epoch": 4.5,
      "learning_rate": 3.046594982078853e-05,
      "loss": 0.7285,
      "step": 280
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.9569892473118284e-05,
      "loss": 0.7295,
      "step": 290
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.8673835125448028e-05,
      "loss": 0.776,
      "step": 300
    },
    {
      "epoch": 4.98,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.7959,
      "step": 310
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.7382629107981221,
      "eval_loss": 0.731025218963623,
      "eval_runtime": 22.8792,
      "eval_samples_per_second": 74.478,
      "eval_steps_per_second": 2.36,
      "step": 311
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.6881720430107527e-05,
      "loss": 0.7269,
      "step": 320
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.5985663082437278e-05,
      "loss": 0.6598,
      "step": 330
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.5089605734767026e-05,
      "loss": 0.6914,
      "step": 340
    },
    {
      "epoch": 5.62,
      "learning_rate": 2.4193548387096777e-05,
      "loss": 0.6695,
      "step": 350
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.3297491039426525e-05,
      "loss": 0.6746,
      "step": 360
    },
    {
      "epoch": 5.94,
      "learning_rate": 2.2401433691756272e-05,
      "loss": 0.6765,
      "step": 370
    },
    {
      "epoch": 5.99,
      "eval_accuracy": 0.7470657276995305,
      "eval_loss": 0.7246695756912231,
      "eval_runtime": 22.9912,
      "eval_samples_per_second": 74.115,
      "eval_steps_per_second": 2.349,
      "step": 373
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.1505376344086024e-05,
      "loss": 0.6417,
      "step": 380
    },
    {
      "epoch": 6.27,
      "learning_rate": 2.060931899641577e-05,
      "loss": 0.633,
      "step": 390
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.9713261648745522e-05,
      "loss": 0.6314,
      "step": 400
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.881720430107527e-05,
      "loss": 0.6661,
      "step": 410
    },
    {
      "epoch": 6.75,
      "learning_rate": 1.7921146953405018e-05,
      "loss": 0.6203,
      "step": 420
    },
    {
      "epoch": 6.91,
      "learning_rate": 1.702508960573477e-05,
      "loss": 0.6504,
      "step": 430
    },
    {
      "epoch": 6.99,
      "eval_accuracy": 0.7576291079812206,
      "eval_loss": 0.6938906311988831,
      "eval_runtime": 22.8151,
      "eval_samples_per_second": 74.687,
      "eval_steps_per_second": 2.367,
      "step": 435
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.5945,
      "step": 440
    },
    {
      "epoch": 7.23,
      "learning_rate": 1.5232974910394265e-05,
      "loss": 0.5912,
      "step": 450
    },
    {
      "epoch": 7.39,
      "learning_rate": 1.4336917562724014e-05,
      "loss": 0.5645,
      "step": 460
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.3440860215053763e-05,
      "loss": 0.5811,
      "step": 470
    },
    {
      "epoch": 7.71,
      "learning_rate": 1.2544802867383513e-05,
      "loss": 0.6433,
      "step": 480
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.1648745519713262e-05,
      "loss": 0.5846,
      "step": 490
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.7576291079812206,
      "eval_loss": 0.6983180046081543,
      "eval_runtime": 22.9755,
      "eval_samples_per_second": 74.166,
      "eval_steps_per_second": 2.35,
      "step": 498
    },
    {
      "epoch": 8.03,
      "learning_rate": 1.0752688172043012e-05,
      "loss": 0.6193,
      "step": 500
    },
    {
      "epoch": 8.19,
      "learning_rate": 9.856630824372761e-06,
      "loss": 0.6121,
      "step": 510
    },
    {
      "epoch": 8.35,
      "learning_rate": 8.960573476702509e-06,
      "loss": 0.5985,
      "step": 520
    },
    {
      "epoch": 8.51,
      "learning_rate": 8.064516129032258e-06,
      "loss": 0.554,
      "step": 530
    },
    {
      "epoch": 8.67,
      "learning_rate": 7.168458781362007e-06,
      "loss": 0.5987,
      "step": 540
    },
    {
      "epoch": 8.84,
      "learning_rate": 6.2724014336917564e-06,
      "loss": 0.5881,
      "step": 550
    },
    {
      "epoch": 9.0,
      "learning_rate": 5.376344086021506e-06,
      "loss": 0.5774,
      "step": 560
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.7593896713615024,
      "eval_loss": 0.6935292482376099,
      "eval_runtime": 23.2143,
      "eval_samples_per_second": 73.403,
      "eval_steps_per_second": 2.326,
      "step": 560
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.4802867383512545e-06,
      "loss": 0.5731,
      "step": 570
    },
    {
      "epoch": 9.32,
      "learning_rate": 3.5842293906810035e-06,
      "loss": 0.5458,
      "step": 580
    },
    {
      "epoch": 9.48,
      "learning_rate": 2.688172043010753e-06,
      "loss": 0.5373,
      "step": 590
    },
    {
      "epoch": 9.64,
      "learning_rate": 1.7921146953405017e-06,
      "loss": 0.5995,
      "step": 600
    },
    {
      "epoch": 9.8,
      "learning_rate": 8.960573476702509e-07,
      "loss": 0.5283,
      "step": 610
    },
    {
      "epoch": 9.96,
      "learning_rate": 0.0,
      "loss": 0.5749,
      "step": 620
    },
    {
      "epoch": 9.96,
      "eval_accuracy": 0.7570422535211268,
      "eval_loss": 0.6881020665168762,
      "eval_runtime": 22.8182,
      "eval_samples_per_second": 74.677,
      "eval_steps_per_second": 2.367,
      "step": 620
    },
    {
      "epoch": 9.96,
      "step": 620,
      "total_flos": 6.145965294585532e+18,
      "train_loss": 0.8511337226436985,
      "train_runtime": 3069.2061,
      "train_samples_per_second": 25.942,
      "train_steps_per_second": 0.202
    }
  ],
  "logging_steps": 10,
  "max_steps": 620,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 6.145965294585532e+18,
  "trial_name": null,
  "trial_params": null
}