{
  "best_metric": 0.8933177022274327,
  "best_model_checkpoint": "./roberta_10ep-roles-3e-05/checkpoint-300",
  "epoch": 4.0,
  "eval_steps": 500,
  "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_f1": 0.8915956151035322,
      "eval_loss": 0.03141865134239197,
      "eval_precision": 0.9336734693877551,
      "eval_recall": 0.8531468531468531,
      "eval_runtime": 7.0392,
      "eval_samples_per_second": 28.412,
      "eval_steps_per_second": 3.552,
      "step": 75
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.878048780487805,
      "eval_loss": 0.03434527665376663,
      "eval_precision": 0.8372093023255814,
      "eval_recall": 0.9230769230769231,
      "eval_runtime": 7.2469,
      "eval_samples_per_second": 27.598,
      "eval_steps_per_second": 3.45,
      "step": 150
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.8546824542518838,
      "eval_loss": 0.04272409528493881,
      "eval_precision": 0.794,
      "eval_recall": 0.9254079254079254,
      "eval_runtime": 7.4309,
      "eval_samples_per_second": 26.914,
      "eval_steps_per_second": 3.364,
      "step": 225
    },
    {
      "epoch": 4.0,
      "eval_f1": 0.8933177022274327,
      "eval_loss": 0.03827732056379318,
      "eval_precision": 0.8985849056603774,
      "eval_recall": 0.8881118881118881,
      "eval_runtime": 7.1978,
      "eval_samples_per_second": 27.786,
      "eval_steps_per_second": 3.473,
      "step": 300
    }
  ],
  "logging_steps": 500,
  "max_steps": 750,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 236841013410240.0,
  "trial_name": null,
  "trial_params": null
}