{ "best_metric": 0.16979966395795365, "best_model_checkpoint": "distilbert-base-uncased-finetuned-cola/run-2/checkpoint-2138", "epoch": 1.0, "eval_steps": 500, "global_step": 2138, "is_hyper_param_search": true, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.23, "learning_rate": 5.6188127124741755e-05, "loss": 0.6368, "step": 500 }, { "epoch": 0.47, "learning_rate": 5.269904819250494e-05, "loss": 0.6054, "step": 1000 }, { "epoch": 0.7, "learning_rate": 4.920996926026812e-05, "loss": 0.6131, "step": 1500 }, { "epoch": 0.94, "learning_rate": 4.57208903280313e-05, "loss": 0.5971, "step": 2000 }, { "epoch": 1.0, "eval_loss": 0.6043549180030823, "eval_matthews_correlation": 0.16979966395795365, "eval_runtime": 0.8177, "eval_samples_per_second": 1275.601, "eval_steps_per_second": 80.719, "step": 2138 } ], "logging_steps": 500, "max_steps": 8552, "num_input_tokens_seen": 0, "num_train_epochs": 4, "save_steps": 500, "total_flos": 32693574929232.0, "train_batch_size": 4, "trial_name": null, "trial_params": { "learning_rate": 5.967720605697858e-05, "num_train_epochs": 4, "per_device_train_batch_size": 4, "seed": 37 } }