{
  "eval_loss": 0.09040758013725281,
  "eval_exact_match_accuracy": 0.46424622189458165,
  "eval_precision_micro": 0.6286882332029862,
  "eval_recall_micro": 0.5543887147335423,
  "eval_f1_micro": 0.5892053973013494,
  "eval_precision_macro": 0.5876411237797338,
  "eval_recall_macro": 0.454800127114999,
  "eval_f1_macro": 0.49235331483739386,
  "eval_classification_report": {
    "admiration": {
      "precision": 0.7203389830508474,
      "recall": 0.6967213114754098,
      "f1-score": 0.7083333333333334,
      "support": 488.0
    },
    "amusement": {
      "precision": 0.7391304347826086,
      "recall": 0.8415841584158416,
      "f1-score": 0.7870370370370371,
      "support": 303.0
    },
    "anger": {
      "precision": 0.53125,
      "recall": 0.5230769230769231,
      "f1-score": 0.5271317829457365,
      "support": 195.0
    },
    "annoyance": {
      "precision": 0.416988416988417,
      "recall": 0.3564356435643564,
      "f1-score": 0.38434163701067614,
      "support": 303.0
    },
    "approval": {
      "precision": 0.4690265486725664,
      "recall": 0.26700251889168763,
      "f1-score": 0.3402889245585875,
      "support": 397.0
    },
    "caring": {
      "precision": 0.6129032258064516,
      "recall": 0.37254901960784315,
      "f1-score": 0.4634146341463415,
      "support": 153.0
    },
    "confusion": {
      "precision": 0.6086956521739131,
      "recall": 0.3684210526315789,
      "f1-score": 0.45901639344262296,
      "support": 152.0
    },
    "curiosity": {
      "precision": 0.46206896551724136,
      "recall": 0.5403225806451613,
      "f1-score": 0.49814126394052044,
      "support": 248.0
    },
    "desire": {
      "precision": 0.6190476190476191,
      "recall": 0.5064935064935064,
      "f1-score": 0.5571428571428572,
      "support": 77.0
    },
    "disappointment": {
      "precision": 0.43137254901960786,
      "recall": 0.26993865030674846,
      "f1-score": 0.3320754716981132,
      "support": 163.0
    },
    "disapproval": {
      "precision": 0.45064377682403434,
      "recall": 0.3595890410958904,
      "f1-score": 0.4,
      "support": 292.0
    },
    "disgust": {
      "precision": 0.5405405405405406,
      "recall": 0.41237113402061853,
      "f1-score": 0.4678362573099415,
      "support": 97.0
    },
    "embarrassment": {
      "precision": 0.6923076923076923,
      "recall": 0.5142857142857142,
      "f1-score": 0.5901639344262295,
      "support": 35.0
    },
    "excitement": {
      "precision": 0.42857142857142855,
      "recall": 0.3125,
      "f1-score": 0.3614457831325301,
      "support": 96.0
    },
    "fear": {
      "precision": 0.6857142857142857,
      "recall": 0.5333333333333333,
      "f1-score": 0.6,
      "support": 90.0
    },
    "gratitude": {
      "precision": 0.9300291545189504,
      "recall": 0.8910614525139665,
      "f1-score": 0.9101283880171184,
      "support": 358.0
    },
    "grief": {
      "precision": 0.0,
      "recall": 0.0,
      "f1-score": 0.0,
      "support": 13.0
    },
    "joy": {
      "precision": 0.5695364238410596,
      "recall": 0.5,
      "f1-score": 0.5325077399380805,
      "support": 172.0
    },
    "love": {
      "precision": 0.7674418604651163,
      "recall": 0.7857142857142857,
      "f1-score": 0.7764705882352941,
      "support": 252.0
    },
    "nervousness": {
      "precision": 0.4166666666666667,
      "recall": 0.23809523809523808,
      "f1-score": 0.30303030303030304,
      "support": 21.0
    },
    "optimism": {
      "precision": 0.6792452830188679,
      "recall": 0.5167464114832536,
      "f1-score": 0.5869565217391305,
      "support": 209.0
    },
    "pride": {
      "precision": 0.8333333333333334,
      "recall": 0.3333333333333333,
      "f1-score": 0.47619047619047616,
      "support": 15.0
    },
    "realization": {
      "precision": 0.42857142857142855,
      "recall": 0.2125984251968504,
      "f1-score": 0.28421052631578947,
      "support": 127.0
    },
    "relief": {
      "precision": 1.0,
      "recall": 0.05555555555555555,
      "f1-score": 0.10526315789473684,
      "support": 18.0
    },
    "remorse": {
      "precision": 0.7391304347826086,
      "recall": 0.5,
      "f1-score": 0.5964912280701754,
      "support": 68.0
    },
    "sadness": {
      "precision": 0.5314685314685315,
      "recall": 0.5314685314685315,
      "f1-score": 0.5314685314685315,
      "support": 143.0
    },
    "surprise": {
      "precision": 0.48044692737430167,
      "recall": 0.6666666666666666,
      "f1-score": 0.5584415584415584,
      "support": 129.0
    },
    "neutral": {
      "precision": 0.669481302774427,
      "recall": 0.6285390713476784,
      "f1-score": 0.6483644859813084,
      "support": 1766.0
    },
    "micro avg": {
      "precision": 0.6286882332029862,
      "recall": 0.5543887147335423,
      "f1-score": 0.5892053973013494,
      "support": 6380.0
    },
    "macro avg": {
      "precision": 0.5876411237797338,
      "recall": 0.454800127114999,
      "f1-score": 0.49235331483739386,
      "support": 6380.0
    },
    "weighted avg": {
      "precision": 0.6194504501264384,
      "recall": 0.5543887147335423,
      "f1-score": 0.5787029045585259,
      "support": 6380.0
    },
    "samples avg": {
      "precision": 0.5884629561371176,
      "recall": 0.5802309866076913,
      "f1-score": 0.5711635336036368,
      "support": 6380.0
    }
  },
  "eval_runtime": 7.5881,
  "eval_samples_per_second": 715.066,
  "eval_steps_per_second": 44.807,
  "epoch": 6.0,
  "timestamp": "2025-04-06 00:27:38"
}