{
  "best_metric": 0.7207518220176448,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-crop-classification/checkpoint-366",
  "epoch": 1.9945504087193462,
  "eval_steps": 500,
  "global_step": 366,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 1.7266,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.702702702702703e-05,
      "loss": 1.2951,
      "step": 20
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.0540540540540545e-05,
      "loss": 1.0586,
      "step": 30
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.954407294832827e-05,
      "loss": 0.9976,
      "step": 40
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.80243161094225e-05,
      "loss": 0.8893,
      "step": 50
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.650455927051672e-05,
      "loss": 0.8721,
      "step": 60
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.4984802431610946e-05,
      "loss": 0.8468,
      "step": 70
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.346504559270517e-05,
      "loss": 0.8606,
      "step": 80
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1945288753799394e-05,
      "loss": 0.8196,
      "step": 90
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0425531914893614e-05,
      "loss": 0.8142,
      "step": 100
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.890577507598784e-05,
      "loss": 0.8056,
      "step": 110
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.738601823708207e-05,
      "loss": 0.7827,
      "step": 120
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.5866261398176296e-05,
      "loss": 0.8085,
      "step": 130
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.4346504559270524e-05,
      "loss": 0.7601,
      "step": 140
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.2826747720364744e-05,
      "loss": 0.7839,
      "step": 150
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.130699088145897e-05,
      "loss": 0.739,
      "step": 160
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.9787234042553192e-05,
      "loss": 0.7497,
      "step": 170
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.826747720364742e-05,
      "loss": 0.7918,
      "step": 180
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6988876102800153,
      "eval_loss": 0.7514234185218811,
      "eval_runtime": 64.6935,
      "eval_samples_per_second": 40.298,
      "eval_steps_per_second": 1.268,
      "step": 183
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.674772036474164e-05,
      "loss": 0.7871,
      "step": 190
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.5227963525835867e-05,
      "loss": 0.7642,
      "step": 200
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.3708206686930094e-05,
      "loss": 0.766,
      "step": 210
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.2188449848024318e-05,
      "loss": 0.7293,
      "step": 220
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.0668693009118542e-05,
      "loss": 0.7038,
      "step": 230
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.9148936170212766e-05,
      "loss": 0.7474,
      "step": 240
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.7629179331306993e-05,
      "loss": 0.7372,
      "step": 250
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.6109422492401217e-05,
      "loss": 0.7289,
      "step": 260
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.458966565349544e-05,
      "loss": 0.7612,
      "step": 270
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3069908814589665e-05,
      "loss": 0.7186,
      "step": 280
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.1550151975683892e-05,
      "loss": 0.754,
      "step": 290
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.0030395136778116e-05,
      "loss": 0.7635,
      "step": 300
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.7229,
      "step": 310
    },
    {
      "epoch": 1.74,
      "learning_rate": 6.990881458966565e-06,
      "loss": 0.7282,
      "step": 320
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.471124620060791e-06,
      "loss": 0.6946,
      "step": 330
    },
    {
      "epoch": 1.85,
      "learning_rate": 3.951367781155015e-06,
      "loss": 0.73,
      "step": 340
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.43161094224924e-06,
      "loss": 0.7428,
      "step": 350
    },
    {
      "epoch": 1.96,
      "learning_rate": 9.11854103343465e-07,
      "loss": 0.7053,
      "step": 360
    },
    {
      "epoch": 1.99,
      "eval_accuracy": 0.7207518220176448,
      "eval_loss": 0.6979314088821411,
      "eval_runtime": 64.3739,
      "eval_samples_per_second": 40.498,
      "eval_steps_per_second": 1.274,
      "step": 366
    },
    {
      "epoch": 1.99,
      "step": 366,
      "total_flos": 1.1638798169439007e+18,
      "train_loss": 0.8231194683762847,
      "train_runtime": 1541.7822,
      "train_samples_per_second": 30.434,
      "train_steps_per_second": 0.237
    }
  ],
  "logging_steps": 10,
  "max_steps": 366,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 1.1638798169439007e+18,
  "trial_name": null,
  "trial_params": null
}