{
  "num_labels": 7,
  "label2id": {
    "confusion": 0,
    "curiosity": 1,
    "positive_arousal": 2,
    "relief": 3,
    "sadness": 4,
    "threat": 5,
    "warmth": 6
  },
  "id2label": {
    "0": "confusion",
    "1": "curiosity",
    "2": "positive_arousal",
    "3": "relief",
    "4": "sadness",
    "5": "threat",
    "6": "warmth"
  },
  "threshold_global": 0.5500000000000002,
  "threshold_per_class": {
    "confusion": 0.5500000000000002,
    "curiosity": 0.40000000000000013,
    "positive_arousal": 0.6500000000000001,
    "relief": 0.5500000000000002,
    "sadness": 0.5000000000000001,
    "threat": 0.15000000000000002,
    "warmth": 0.5000000000000001
  },
  "problem_type": "multi_label_classification",
  "pos_weight_strategy": "sqrt_dampened_clamp5",
  "apply_threshold_fallback": false,
  "test_metrics_global": {
    "exact_match": 0.3326530612244898,
    "f1_macro": 0.6967823591742103,
    "precision_macro": 0.7168987173854955,
    "recall_macro": 0.6911034768719782
  },
  "test_metrics_perclass": {
    "exact_match": 0.27755102040816326,
    "f1_macro": 0.7036221934372183,
    "precision_macro": 0.6890624904270094,
    "recall_macro": 0.7407960688549154
  }
}