{
  "best_epoch": 6,
  "best_score": 0.8378675253543592,
  "best_selection_score": 0.8378675253543592,
  "best_val_metrics_at_selection": {
    "acc": 0.8910891089108911,
    "f1": 0.8332003192338387,
    "precision": 0.7687776141384389,
    "recall": 0.9094076655052264
  },
  "model_selection": {
    "metric": "recall-floor-constrained-fbeta",
    "beta": 0.5,
    "recall_floor": 0.9,
    "recall_floor_penalty": 2.0,
    "acc_weight": 0.05
  },
  "data_provenance": {
    "train_csv": {
      "path": "data_for_deberta/Risk-only-pick/task1_risk_training_20260225.csv",
      "sha256_16": "149b65cfd4785835",
      "size_bytes": 61496472
    },
    "val_csv_source": {
      "path": "data_for_deberta/dedup_split/risk_val.csv",
      "sha256_16": "61c445d1558d1b34",
      "size_bytes": 6044780
    },
    "final_eval_csv": {
      "path": "data_for_deberta/dedup_split/risk_test.csv",
      "sha256_16": "7bb9f6e80cd447ae",
      "size_bytes": 9345127
    }
  },
  "train_rows": 17206,
  "val_rows": 1919,
  "final_eval_rows": 2879,
  "val_metrics": {
    "acc": 0.8910891089108911,
    "f1": 0.8332003192338387,
    "precision": 0.7687776141384389,
    "recall": 0.9094076655052264
  },
  "final_eval_metrics": {
    "acc": 0.9048280653004516,
    "f1": 0.8477777777777777,
    "precision": 0.7730496453900709,
    "recall": 0.9384993849938499
  },
  "final_eval_baseline": {
    "always_pick_acc": 0.28238971865230983,
    "always_decline_acc": 0.7176102813476901
  }
}