{
"validation_accuracy": 0.8579234972677595,
"test_accuracy": 0.8754098360655738,
"validation_report": {
"biology": {
"precision": 0.8518518518518519,
"recall": 0.8518518518518519,
"f1-score": 0.8518518518518519,
"support": 108.0
},
"business": {
"precision": 0.868421052631579,
"recall": 0.8319327731092437,
"f1-score": 0.8497854077253219,
"support": 119.0
},
"chemistry": {
"precision": 0.8352941176470589,
"recall": 0.8352941176470589,
"f1-score": 0.8352941176470589,
"support": 170.0
},
"computer science": {
"precision": 0.9107142857142857,
"recall": 0.8360655737704918,
"f1-score": 0.8717948717948718,
"support": 61.0
},
"economics": {
"precision": 0.8615384615384616,
"recall": 0.8888888888888888,
"f1-score": 0.875,
"support": 126.0
},
"engineering": {
"precision": 0.8661971830985915,
"recall": 0.8482758620689655,
"f1-score": 0.8571428571428571,
"support": 145.0
},
"health": {
"precision": 0.8524590163934426,
"recall": 0.8455284552845529,
"f1-score": 0.8489795918367347,
"support": 123.0
},
"history": {
"precision": 0.9122807017543859,
"recall": 0.9122807017543859,
"f1-score": 0.9122807017543859,
"support": 57.0
},
"law": {
"precision": 0.9695121951219512,
"recall": 0.9636363636363636,
"f1-score": 0.9665653495440729,
"support": 165.0
},
"math": {
"precision": 0.8709677419354839,
"recall": 0.9310344827586207,
"f1-score": 0.9,
"support": 203.0
},
"other": {
"precision": 0.7668711656441718,
"recall": 0.7668711656441718,
"f1-score": 0.7668711656441718,
"support": 163.0
},
"philosophy": {
"precision": 0.8461538461538461,
"recall": 0.88,
"f1-score": 0.8627450980392157,
"support": 75.0
},
"physics": {
"precision": 0.7960199004975125,
"recall": 0.8205128205128205,
"f1-score": 0.8080808080808081,
"support": 195.0
},
"psychology": {
"precision": 0.8888888888888888,
"recall": 0.8,
"f1-score": 0.8421052631578947,
"support": 120.0
},
"accuracy": 0.8579234972677595,
"macro avg": {
"precision": 0.8640836006336795,
"recall": 0.8580123612091013,
"f1-score": 0.860606934587089,
"support": 1830.0
},
"weighted avg": {
"precision": 0.8584855863084755,
"recall": 0.8579234972677595,
"f1-score": 0.8578042125480061,
"support": 1830.0
}
},
"test_report": {
"biology": {
"precision": 0.9019607843137255,
"recall": 0.8598130841121495,
"f1-score": 0.8803827751196173,
"support": 107.0
},
"business": {
"precision": 0.8412698412698413,
"recall": 0.8983050847457628,
"f1-score": 0.8688524590163934,
"support": 118.0
},
"chemistry": {
"precision": 0.8647058823529412,
"recall": 0.8647058823529412,
"f1-score": 0.8647058823529412,
"support": 170.0
},
"computer science": {
"precision": 0.9193548387096774,
"recall": 0.9193548387096774,
"f1-score": 0.9193548387096774,
"support": 62.0
},
"economics": {
"precision": 0.8934426229508197,
"recall": 0.8582677165354331,
"f1-score": 0.8755020080321285,
"support": 127.0
},
"engineering": {
"precision": 0.8493150684931506,
"recall": 0.8493150684931506,
"f1-score": 0.8493150684931506,
"support": 146.0
},
"health": {
"precision": 0.9067796610169492,
"recall": 0.8699186991869918,
"f1-score": 0.8879668049792531,
"support": 123.0
},
"history": {
"precision": 0.8983050847457628,
"recall": 0.9298245614035088,
"f1-score": 0.9137931034482759,
"support": 57.0
},
"law": {
"precision": 0.9467455621301775,
"recall": 0.9696969696969697,
"f1-score": 0.9580838323353293,
"support": 165.0
},
"math": {
"precision": 0.9154228855721394,
"recall": 0.9108910891089109,
"f1-score": 0.913151364764268,
"support": 202.0
},
"other": {
"precision": 0.7771428571428571,
"recall": 0.8292682926829268,
"f1-score": 0.8023598820058997,
"support": 164.0
},
"philosophy": {
"precision": 0.8783783783783784,
"recall": 0.8666666666666667,
"f1-score": 0.87248322147651,
"support": 75.0
},
"physics": {
"precision": 0.8624338624338624,
"recall": 0.8358974358974359,
"f1-score": 0.8489583333333334,
"support": 195.0
},
"psychology": {
"precision": 0.8461538461538461,
"recall": 0.8319327731092437,
"f1-score": 0.8389830508474576,
"support": 119.0
},
"accuracy": 0.8754098360655738,
"macro avg": {
"precision": 0.8786722268331522,
"recall": 0.8781327259072694,
"f1-score": 0.8781351874938742,
"support": 1830.0
},
"weighted avg": {
"precision": 0.8761253032215991,
"recall": 0.8754098360655738,
"f1-score": 0.8755027073000429,
"support": 1830.0
}
},
"validation_confusion_matrix": [
[
92,
0,
0,
0,
1,
0,
7,
1,
0,
2,
4,
0,
0,
1
],
[
0,
99,
1,
0,
5,
1,
0,
0,
2,
3,
4,
2,
1,
1
],
[
1,
0,
142,
0,
0,
6,
1,
0,
0,
2,
1,
0,
17,
0
],
[
0,
0,
0,
51,
0,
3,
1,
0,
0,
5,
1,
0,
0,
0
],
[
0,
5,
0,
1,
112,
0,
1,
0,
0,
0,
2,
2,
1,
2
],
[
0,
0,
6,
1,
0,
123,
0,
0,
0,
4,
0,
0,
11,
0
],
[
7,
0,
2,
0,
1,
0,
104,
0,
0,
2,
4,
0,
0,
3
],
[
2,
0,
0,
0,
0,
0,
0,
52,
0,
0,
3,
0,
0,
0
],
[
0,
0,
0,
0,
0,
0,
1,
0,
159,
0,
3,
2,
0,
0
],
[
0,
2,
2,
1,
1,
1,
0,
0,
0,
189,
1,
0,
6,
0
],
[
3,
6,
1,
0,
8,
0,
2,
1,
3,
3,
125,
4,
5,
2
],
[
0,
2,
0,
0,
1,
0,
1,
2,
0,
0,
1,
66,
0,
2
],
[
0,
0,
16,
0,
0,
8,
0,
0,
0,
5,
5,
0,
160,
1
],
[
3,
0,
0,
2,
1,
0,
4,
1,
0,
2,
9,
2,
0,
96
]
],
"test_confusion_matrix": [
[
92,
1,
0,
0,
1,
1,
4,
1,
1,
0,
2,
0,
1,
3
],
[
0,
106,
0,
0,
3,
2,
0,
0,
0,
0,
2,
5,
0,
0
],
[
0,
0,
147,
0,
0,
9,
0,
0,
0,
3,
3,
0,
8,
0
],
[
0,
0,
1,
57,
0,
1,
0,
0,
0,
0,
3,
0,
0,
0
],
[
0,
10,
0,
0,
109,
0,
0,
0,
0,
1,
6,
0,
0,
1
],
[
1,
0,
4,
3,
0,
124,
0,
1,
0,
2,
0,
0,
11,
0
],
[
3,
0,
1,
0,
0,
1,
107,
0,
1,
0,
7,
0,
1,
2
],
[
1,
0,
0,
0,
0,
0,
1,
53,
0,
0,
2,
0,
0,
0
],
[
0,
1,
0,
0,
0,
0,
0,
0,
160,
0,
2,
2,
0,
0
],
[
0,
4,
1,
1,
2,
0,
0,
0,
1,
184,
3,
0,
3,
3
],
[
0,
4,
0,
1,
5,
0,
4,
1,
4,
1,
136,
1,
1,
6
],
[
0,
0,
0,
0,
1,
0,
0,
2,
2,
0,
3,
65,
0,
2
],
[
1,
0,
16,
0,
0,
8,
0,
1,
0,
4,
1,
0,
163,
1
],
[
4,
0,
0,
0,
1,
0,
2,
0,
0,
6,
5,
1,
1,
99
]
]
}