{
  "best_metric": 0.6014132499694824,
  "best_model_checkpoint": "bertimbau_lr5e-5_b32_e4-finetuned-model\\checkpoint-1558",
  "epoch": 4.0,
  "global_step": 3116,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.64,
      "learning_rate": 4.197689345314506e-05,
      "loss": 0.8833,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7870905587668593,
      "eval_loss": 0.6248906850814819,
      "eval_runtime": 8.5236,
      "eval_samples_per_second": 365.339,
      "eval_steps_per_second": 11.497,
      "step": 779
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.395378690629012e-05,
      "loss": 0.5969,
      "step": 1000
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.5930680359435173e-05,
      "loss": 0.5395,
      "step": 1500
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7931920359666025,
      "eval_loss": 0.6014132499694824,
      "eval_runtime": 8.4209,
      "eval_samples_per_second": 369.796,
      "eval_steps_per_second": 11.638,
      "step": 1558
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.7907573812580232e-05,
      "loss": 0.4089,
      "step": 2000
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7996146435452793,
      "eval_loss": 0.6259180903434753,
      "eval_runtime": 8.5575,
      "eval_samples_per_second": 363.892,
      "eval_steps_per_second": 11.452,
      "step": 2337
    },
    {
      "epoch": 3.21,
      "learning_rate": 9.884467265725289e-06,
      "loss": 0.3567,
      "step": 2500
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.8613607188703468e-06,
      "loss": 0.2637,
      "step": 3000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.7980089916506101,
      "eval_loss": 0.6973580718040466,
      "eval_runtime": 8.4468,
      "eval_samples_per_second": 368.659,
      "eval_steps_per_second": 11.602,
      "step": 3116
    }
  ],
  "max_steps": 3116,
  "num_train_epochs": 4,
  "total_flos": 3533921137489464.0,
  "trial_name": null,
  "trial_params": null
}