{
  "num_labels": 7,
  "label2id": {
    "confusion": 0,
    "curiosity": 1,
    "positive_arousal": 2,
    "relief": 3,
    "sadness": 4,
    "threat": 5,
    "warmth": 6
  },
  "id2label": {
    "0": "confusion",
    "1": "curiosity",
    "2": "positive_arousal",
    "3": "relief",
    "4": "sadness",
    "5": "threat",
    "6": "warmth"
  },
  "threshold_global": 0.5000000000000001,
  "threshold_per_class": {
    "confusion": 0.3500000000000001,
    "curiosity": 0.45000000000000007,
    "positive_arousal": 0.5000000000000001,
    "relief": 0.7500000000000002,
    "sadness": 0.6500000000000001,
    "threat": 0.40000000000000013,
    "warmth": 0.3500000000000001
  },
  "problem_type": "multi_label_classification",
  "large_model": "roberta-large",
  "pos_weight_strategy": "sqrt_dampened_clamp5",
  "apply_threshold_fallback": false,
  "test_metrics_global": {
    "exact_match": 0.29183673469387755,
    "f1_macro": 0.6869802060287059,
    "precision_macro": 0.6823835062656095,
    "recall_macro": 0.7091260386025349
  },
  "test_metrics_perclass": {
    "exact_match": 0.2816326530612245,
    "f1_macro": 0.7041873803719186,
    "precision_macro": 0.6793365465441952,
    "recall_macro": 0.7477027429950918
  }
}