Each record below is one benchmark run. Fields: `schema_version` (string), `model` (dict: name, model type, parameter count, revision, URL), `evaluation` (dict: btzsc harness version, commit, timestamp, device, precision, batch size, sample cap), and `results` (dict: `overall`, `by_task`, and `by_dataset` metrics). The flattened table is reconstructed here as one pretty-printed JSON record per model.
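For orientation, a minimal Python sketch of how such a dump might be consumed — a sketch under assumptions, not part of the benchmark tooling: the path `btzsc_results.json` is a placeholder, and the helper simply walks a stream of concatenated pretty-printed JSON objects like the records below. In these records, `overall` matches the unweighted mean of the per-dataset scores (and each `by_task` entry the mean over that task's datasets), so the sketch recomputes it as a sanity check.

```python
import json
from statistics import mean

def iter_records(text: str):
    """Yield each JSON object from a stream of concatenated,
    pretty-printed records (the format shown below)."""
    decoder = json.JSONDecoder()
    pos, end = 0, len(text)
    while pos < end:
        while pos < end and text[pos].isspace():
            pos += 1  # skip whitespace between records
        if pos >= end:
            break
        record, pos = decoder.raw_decode(text, pos)  # (object, next offset)
        yield record

# Placeholder path: adjust to wherever this dump is stored.
with open("btzsc_results.json", encoding="utf-8") as f:
    records = list(iter_records(f.read()))

# Rank models by overall macro-F1, recomputing the dataset mean as a check.
for rec in sorted(records, reverse=True,
                  key=lambda r: r["results"]["overall"]["macro_f1"]):
    by_dataset = rec["results"]["by_dataset"]
    recomputed = mean(d["macro_f1"] for d in by_dataset.values())
    print(f'{rec["model"]["name"]:<28} '
          f'overall={rec["results"]["overall"]["macro_f1"]:.4f} '
          f'(mean of {len(by_dataset)} datasets: {recomputed:.4f})')
```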
```json
{
  "schema_version": "1.0",
  "model": {
"name": "Qwen3-Embedding-0.6B",
"model_type": "embedding",
"params": "600M",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-Embedding-0.6B"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.579734,
"accuracy": 0.60678,
"macro_precision": 0.630595,
"macro_recall": 0.641596
},
"by_task": {
"sentiment": {
"macro_f1": 0.810295,
"accuracy": 0.803694,
"macro_precision": 0.832307,
"macro_recall": 0.837046
},
"emotion": {
"macro_f1": 0.43104,
"accuracy": 0.465,
"macro_precision": 0.457807,
"macro_recall": 0.47917
},
"intent": {
"macro_f1": 0.56282,
"accuracy": 0.599,
"macro_precision": 0.593036,
"macro_recall": 0.613298
},
"topic": {
"macro_f1": 0.485622,
"accuracy": 0.527273,
"macro_precision": 0.56223,
"macro_recall": 0.572237
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9028522372245789,
"accuracy": 0.902999997138977,
"macro_precision": 0.9034148454666138,
"macro_recall": 0.9025961756706238
},
"imdb": {
"macro_f1": 0.8802140951156616,
"accuracy": 0.8809999823570251,
"macro_precision": 0.8863784074783325,
"macro_recall": 0.8794885277748108
},
"appreviews": {
"macro_f1": 0.8688739538192749,
"accuracy": 0.8690000176429749,
"macro_precision": 0.8710983991622925,
"macro_recall": 0.8693036437034607
},
"yelpreviews": {
"macro_f1": 0.9619925618171692,
"accuracy": 0.9620000123977661,
"macro_precision": 0.9619758129119873,
"macro_recall": 0.9622864723205566
},
"rottentomatoes": {
"macro_f1": 0.7592573165893555,
"accuracy": 0.7607879638671875,
"macro_precision": 0.767593502998352,
"macro_recall": 0.7607879638671875
},
"financialphrasebank": {
"macro_f1": 0.488581120967865,
"accuracy": 0.4463768005371094,
"macro_precision": 0.6033790707588196,
"macro_recall": 0.6478113532066345
},
"emotiondair": {
"macro_f1": 0.47812420129776,
"accuracy": 0.527999997138977,
"macro_precision": 0.484048992395401,
"macro_recall": 0.546855092048645
},
"empathetic": {
"macro_f1": 0.38395485281944275,
"accuracy": 0.4020000100135803,
"macro_precision": 0.4315657615661621,
"macro_recall": 0.4114857316017151
},
"banking77": {
"macro_f1": 0.6418111324310303,
"accuracy": 0.6389999985694885,
"macro_precision": 0.6807952523231506,
"macro_recall": 0.6659894585609436
},
"biasframes_intent": {
"macro_f1": 0.4549930691719055,
"accuracy": 0.527999997138977,
"macro_precision": 0.5053926706314087,
"macro_recall": 0.5030898451805115
},
"massive": {
"macro_f1": 0.5916567444801331,
"accuracy": 0.6299999952316284,
"macro_precision": 0.5929194092750549,
"macro_recall": 0.6708149313926697
},
"agnews": {
"macro_f1": 0.6595149636268616,
"accuracy": 0.6800000071525574,
"macro_precision": 0.6822134852409363,
"macro_recall": 0.6807345151901245
},
"yahootopics": {
"macro_f1": 0.5451348423957825,
"accuracy": 0.6010000109672546,
"macro_precision": 0.5679214596748352,
"macro_recall": 0.5439555644989014
},
"trueteacher": {
"macro_f1": 0.47297418117523193,
"accuracy": 0.4729999899864197,
"macro_precision": 0.4729878902435303,
"macro_recall": 0.4729841351509094
},
"manifesto": {
"macro_f1": 0.24359840154647827,
"accuracy": 0.40400001406669617,
"macro_precision": 0.27557799220085144,
"macro_recall": 0.2821764349937439
},
"capsotu": {
"macro_f1": 0.5322523713111877,
"accuracy": 0.546999990940094,
"macro_precision": 0.5408480167388916,
"macro_recall": 0.6490484476089478
},
"biasframes_offensive": {
"macro_f1": 0.524954080581665,
"accuracy": 0.5630000233650208,
"macro_precision": 0.5418699383735657,
"macro_recall": 0.5346786975860596
},
"biasframes_sex": {
"macro_f1": 0.32174259424209595,
"accuracy": 0.3779999911785126,
"macro_precision": 0.5156175494194031,
"macro_recall": 0.56468266248703
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6551154851913452,
"accuracy": 0.6740000247955322,
"macro_precision": 0.7386647462844849,
"macro_recall": 0.6805440783500671
},
"wikitoxic_obscene": {
"macro_f1": 0.5426650643348694,
"accuracy": 0.5509999990463257,
"macro_precision": 0.6066822409629822,
"macro_recall": 0.5889812707901001
},
"wikitoxic_threat": {
"macro_f1": 0.32418739795684814,
"accuracy": 0.382999986410141,
"macro_precision": 0.5332828760147095,
"macro_recall": 0.677301287651062
},
"wikitoxic_insult": {
"macro_f1": 0.5197007060050964,
"accuracy": 0.5460000038146973,
"macro_precision": 0.7088633179664612,
"macro_recall": 0.6195178627967834
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "Qwen3-Embedding-8B",
"model_type": "embedding",
"params": "8B",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-Embedding-8B"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.591328,
"accuracy": 0.635753,
"macro_precision": 0.700355,
"macro_recall": 0.669397
},
"by_task": {
"sentiment": {
"macro_f1": 0.891563,
"accuracy": 0.886761,
"macro_precision": 0.893797,
"macro_recall": 0.902851
},
"emotion": {
"macro_f1": 0.506864,
"accuracy": 0.545,
"macro_precision": 0.548924,
"macro_recall": 0.550798
},
"intent": {
"macro_f1": 0.58926,
"accuracy": 0.656333,
"macro_precision": 0.708527,
"macro_recall": 0.661964
},
"topic": {
"macro_f1": 0.443484,
"accuracy": 0.509727,
"macro_precision": 0.620146,
"macro_recall": 0.56565
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9409787058830261,
"accuracy": 0.9409999847412109,
"macro_precision": 0.9409074783325195,
"macro_recall": 0.941142201423645
},
"imdb": {
"macro_f1": 0.9469107389450073,
"accuracy": 0.9470000267028809,
"macro_precision": 0.9476759433746338,
"macro_recall": 0.9465738534927368
},
"appreviews": {
"macro_f1": 0.922999918460846,
"accuracy": 0.9229999780654907,
"macro_precision": 0.9230391979217529,
"macro_recall": 0.923051118850708
},
"yelpreviews": {
"macro_f1": 0.9599921703338623,
"accuracy": 0.9599999785423279,
"macro_precision": 0.9620493650436401,
"macro_recall": 0.961013674736023
},
"rottentomatoes": {
"macro_f1": 0.8608060479164124,
"accuracy": 0.8611631989479065,
"macro_precision": 0.8649088144302368,
"macro_recall": 0.8611632585525513
},
"financialphrasebank": {
"macro_f1": 0.7176883816719055,
"accuracy": 0.6884058117866516,
"macro_precision": 0.72420334815979,
"macro_recall": 0.7841603755950928
},
"emotiondair": {
"macro_f1": 0.5401405692100525,
"accuracy": 0.5910000205039978,
"macro_precision": 0.5484007000923157,
"macro_recall": 0.597652018070221
},
"empathetic": {
"macro_f1": 0.473588228225708,
"accuracy": 0.49900001287460327,
"macro_precision": 0.5494474172592163,
"macro_recall": 0.5039448738098145
},
"banking77": {
"macro_f1": 0.71482253074646,
"accuracy": 0.699999988079071,
"macro_precision": 0.7595980763435364,
"macro_recall": 0.7408914566040039
},
"biasframes_intent": {
"macro_f1": 0.398776650428772,
"accuracy": 0.5569999814033508,
"macro_precision": 0.7143589854240417,
"macro_recall": 0.5210214257240295
},
"massive": {
"macro_f1": 0.6541801691055298,
"accuracy": 0.7120000123977661,
"macro_precision": 0.6516250967979431,
"macro_recall": 0.7239789962768555
},
"agnews": {
"macro_f1": 0.7707853317260742,
"accuracy": 0.7839999794960022,
"macro_precision": 0.8135340809822083,
"macro_recall": 0.7818300127983093
},
"yahootopics": {
"macro_f1": 0.5887834429740906,
"accuracy": 0.640999972820282,
"macro_precision": 0.6388435959815979,
"macro_recall": 0.5808680057525635
},
"trueteacher": {
"macro_f1": 0.4126618504524231,
"accuracy": 0.5090000033378601,
"macro_precision": 0.5115970373153687,
"macro_recall": 0.5042126178741455
},
"manifesto": {
"macro_f1": 0.3240036964416504,
"accuracy": 0.46799999475479126,
"macro_precision": 0.35499346256256104,
"macro_recall": 0.40162768959999084
},
"capsotu": {
"macro_f1": 0.5475385785102844,
"accuracy": 0.5249999761581421,
"macro_precision": 0.5921947360038757,
"macro_recall": 0.6487294435501099
},
"biasframes_offensive": {
"macro_f1": 0.5426437258720398,
"accuracy": 0.6389999985694885,
"macro_precision": 0.7244020700454712,
"macro_recall": 0.5880531072616577
},
"biasframes_sex": {
"macro_f1": 0.26442575454711914,
"accuracy": 0.30300000309944153,
"macro_precision": 0.48936134576797485,
"macro_recall": 0.4601544737815857
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.46657270193099976,
"accuracy": 0.5580000281333923,
"macro_precision": 0.7552865743637085,
"macro_recall": 0.5691487789154053
},
"wikitoxic_obscene": {
"macro_f1": 0.38571250438690186,
"accuracy": 0.46799999475479126,
"macro_precision": 0.710321307182312,
"macro_recall": 0.5426182746887207
},
"wikitoxic_threat": {
"macro_f1": 0.2685002386569977,
"accuracy": 0.3019999861717224,
"macro_precision": 0.5296496152877808,
"macro_recall": 0.6349372267723083
},
"wikitoxic_insult": {
"macro_f1": 0.30670127272605896,
"accuracy": 0.4099999964237213,
"macro_precision": 0.7014169692993164,
"macro_recall": 0.5099667906761169
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "all-MiniLM-L6-v2",
"model_type": "embedding",
"params": "22M",
"revision": "unknown",
"url": "https://huggingface.co/all-MiniLM-L6-v2"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.365998,
"accuracy": 0.442739,
"macro_precision": 0.500873,
"macro_recall": 0.450317
},
"by_task": {
"sentiment": {
"macro_f1": 0.347246,
"accuracy": 0.487208,
"macro_precision": 0.503336,
"macro_recall": 0.477323
},
"emotion": {
"macro_f1": 0.132637,
"accuracy": 0.1405,
"macro_precision": 0.4083,
"macro_recall": 0.195105
},
"intent": {
"macro_f1": 0.410812,
"accuracy": 0.436,
"macro_precision": 0.541921,
"macro_recall": 0.462813
},
"topic": {
"macro_f1": 0.406434,
"accuracy": 0.475273,
"macro_precision": 0.505165,
"macro_recall": 0.47858
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.35260650515556335,
"accuracy": 0.48899999260902405,
"macro_precision": 0.5116842985153198,
"macro_recall": 0.5012728571891785
},
"imdb": {
"macro_f1": 0.3403925895690918,
"accuracy": 0.49300000071525574,
"macro_precision": 0.7449698448181152,
"macro_recall": 0.5058479309082031
},
"appreviews": {
"macro_f1": 0.4131661653518677,
"accuracy": 0.5320000052452087,
"macro_precision": 0.7029896974563599,
"macro_recall": 0.5356342792510986
},
"yelpreviews": {
"macro_f1": 0.32750505208969116,
"accuracy": 0.4869999885559082,
"macro_precision": 0.2434999942779541,
"macro_recall": 0.5
},
"rottentomatoes": {
"macro_f1": 0.3378419876098633,
"accuracy": 0.4990619122982025,
"macro_precision": 0.46404963731765747,
"macro_recall": 0.4990619122982025
},
"financialphrasebank": {
"macro_f1": 0.31196534633636475,
"accuracy": 0.4231884181499481,
"macro_precision": 0.35282549262046814,
"macro_recall": 0.32211822271347046
},
"emotiondair": {
"macro_f1": 0.11101524531841278,
"accuracy": 0.1080000028014183,
"macro_precision": 0.46109339594841003,
"macro_recall": 0.22658133506774902
},
"empathetic": {
"macro_f1": 0.1542579084634781,
"accuracy": 0.17299999296665192,
"macro_precision": 0.3555065989494324,
"macro_recall": 0.16362914443016052
},
"banking77": {
"macro_f1": 0.43356677889823914,
"accuracy": 0.4410000145435333,
"macro_precision": 0.5618711113929749,
"macro_recall": 0.4374663829803467
},
"biasframes_intent": {
"macro_f1": 0.46536508202552795,
"accuracy": 0.5289999842643738,
"macro_precision": 0.6406142711639404,
"macro_recall": 0.5584455728530884
},
"massive": {
"macro_f1": 0.33350270986557007,
"accuracy": 0.33799999952316284,
"macro_precision": 0.4232776463031769,
"macro_recall": 0.39252763986587524
},
"agnews": {
"macro_f1": 0.4948541224002838,
"accuracy": 0.5,
"macro_precision": 0.6265597939491272,
"macro_recall": 0.5069217085838318
},
"yahootopics": {
"macro_f1": 0.36257949471473694,
"accuracy": 0.38199999928474426,
"macro_precision": 0.5528976917266846,
"macro_recall": 0.34622490406036377
},
"trueteacher": {
"macro_f1": 0.395149827003479,
"accuracy": 0.49000000953674316,
"macro_precision": 0.4625127911567688,
"macro_recall": 0.4853178858757019
},
"manifesto": {
"macro_f1": 0.1498693972826004,
"accuracy": 0.3019999861717224,
"macro_precision": 0.18951760232448578,
"macro_recall": 0.1652449667453766
},
"capsotu": {
"macro_f1": 0.479082316160202,
"accuracy": 0.5289999842643738,
"macro_precision": 0.5245693922042847,
"macro_recall": 0.5023189783096313
},
"biasframes_offensive": {
"macro_f1": 0.47554171085357666,
"accuracy": 0.5049999952316284,
"macro_precision": 0.5725581049919128,
"macro_recall": 0.5465722680091858
},
"biasframes_sex": {
"macro_f1": 0.5124979019165039,
"accuracy": 0.7730000019073486,
"macro_precision": 0.5263156294822693,
"macro_recall": 0.5801851749420166
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5129282474517822,
"accuracy": 0.5329999923706055,
"macro_precision": 0.5474761724472046,
"macro_recall": 0.5386421084403992
},
"wikitoxic_obscene": {
"macro_f1": 0.5017039179801941,
"accuracy": 0.503000020980835,
"macro_precision": 0.5081151723861694,
"macro_recall": 0.5083074569702148
},
"wikitoxic_threat": {
"macro_f1": 0.26442036032676697,
"accuracy": 0.3009999990463257,
"macro_precision": 0.5172396898269653,
"macro_recall": 0.5802111029624939
},
"wikitoxic_insult": {
"macro_f1": 0.32215073704719543,
"accuracy": 0.4099999964237213,
"macro_precision": 0.5290513038635254,
"macro_recall": 0.5044324398040771
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "bge-base-en-v1.5",
"model_type": "embedding",
"params": "137M",
"revision": "unknown",
"url": "https://huggingface.co/bge-base-en-v1.5"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.568329,
"accuracy": 0.592612,
"macro_precision": 0.641254,
"macro_recall": 0.640123
},
"by_task": {
"sentiment": {
"macro_f1": 0.819812,
"accuracy": 0.81391,
"macro_precision": 0.852102,
"macro_recall": 0.856473
},
"emotion": {
"macro_f1": 0.361241,
"accuracy": 0.437,
"macro_precision": 0.448951,
"macro_recall": 0.403232
},
"intent": {
"macro_f1": 0.579773,
"accuracy": 0.596333,
"macro_precision": 0.599851,
"macro_recall": 0.625834
},
"topic": {
"macro_f1": 0.465687,
"accuracy": 0.499182,
"macro_precision": 0.572503,
"macro_recall": 0.569081
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9296748638153076,
"accuracy": 0.9300000071525574,
"macro_precision": 0.9338191747665405,
"macro_recall": 0.9288598895072937
},
"imdb": {
"macro_f1": 0.8964173793792725,
"accuracy": 0.8970000147819519,
"macro_precision": 0.9015624523162842,
"macro_recall": 0.8956555128097534
},
"appreviews": {
"macro_f1": 0.9033816456794739,
"accuracy": 0.9039999842643738,
"macro_precision": 0.9129323959350586,
"macro_recall": 0.9034178256988525
},
"yelpreviews": {
"macro_f1": 0.9439979791641235,
"accuracy": 0.9440000057220459,
"macro_precision": 0.9451367855072021,
"macro_recall": 0.9447946548461914
},
"rottentomatoes": {
"macro_f1": 0.8140226006507874,
"accuracy": 0.8142589330673218,
"macro_precision": 0.8158644437789917,
"macro_recall": 0.8142589330673218
},
"financialphrasebank": {
"macro_f1": 0.4313793182373047,
"accuracy": 0.3942028880119324,
"macro_precision": 0.6032983064651489,
"macro_recall": 0.6518524885177612
},
"emotiondair": {
"macro_f1": 0.42592093348503113,
"accuracy": 0.5249999761581421,
"macro_precision": 0.502555787563324,
"macro_recall": 0.46722355484962463
},
"empathetic": {
"macro_f1": 0.29656127095222473,
"accuracy": 0.3490000069141388,
"macro_precision": 0.39534541964530945,
"macro_recall": 0.33924028277397156
},
"banking77": {
"macro_f1": 0.6375364065170288,
"accuracy": 0.6449999809265137,
"macro_precision": 0.682538628578186,
"macro_recall": 0.6667591333389282
},
"biasframes_intent": {
"macro_f1": 0.5726391077041626,
"accuracy": 0.5820000171661377,
"macro_precision": 0.577385425567627,
"macro_recall": 0.5740678310394287
},
"massive": {
"macro_f1": 0.5291436910629272,
"accuracy": 0.5619999766349792,
"macro_precision": 0.539627730846405,
"macro_recall": 0.6366740465164185
},
"agnews": {
"macro_f1": 0.6345731616020203,
"accuracy": 0.6489999890327454,
"macro_precision": 0.6807701587677002,
"macro_recall": 0.652498722076416
},
"yahootopics": {
"macro_f1": 0.5134701132774353,
"accuracy": 0.5929999947547913,
"macro_precision": 0.5593596696853638,
"macro_recall": 0.5850311517715454
},
"trueteacher": {
"macro_f1": 0.4814004898071289,
"accuracy": 0.4819999933242798,
"macro_precision": 0.4823671579360962,
"macro_recall": 0.48247748613357544
},
"manifesto": {
"macro_f1": 0.20281097292900085,
"accuracy": 0.33500000834465027,
"macro_precision": 0.24518485367298126,
"macro_recall": 0.22956110537052155
},
"capsotu": {
"macro_f1": 0.5365762710571289,
"accuracy": 0.5789999961853027,
"macro_precision": 0.5299004316329956,
"macro_recall": 0.6417205333709717
},
"biasframes_offensive": {
"macro_f1": 0.5659722089767456,
"accuracy": 0.6039999723434448,
"macro_precision": 0.5927748084068298,
"macro_recall": 0.5746545195579529
},
"biasframes_sex": {
"macro_f1": 0.16457810997962952,
"accuracy": 0.1679999977350235,
"macro_precision": 0.5050225853919983,
"macro_recall": 0.5098469853401184
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7208366394042969,
"accuracy": 0.7260000109672546,
"macro_precision": 0.7522772550582886,
"macro_recall": 0.7300294637680054
},
"wikitoxic_obscene": {
"macro_f1": 0.6794031858444214,
"accuracy": 0.6819999814033508,
"macro_precision": 0.7384219169616699,
"macro_recall": 0.71601802110672
},
"wikitoxic_threat": {
"macro_f1": 0.16755354404449463,
"accuracy": 0.17399999499320984,
"macro_precision": 0.5209817290306091,
"macro_recall": 0.5571510195732117
},
"wikitoxic_insult": {
"macro_f1": 0.4553821086883545,
"accuracy": 0.49900001287460327,
"macro_precision": 0.6904691457748413,
"macro_recall": 0.5809069871902466
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "bge-large-en-v1.5",
"model_type": "embedding",
"params": "434M",
"revision": "unknown",
"url": "https://huggingface.co/bge-large-en-v1.5"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.554848,
"accuracy": 0.589174,
"macro_precision": 0.651518,
"macro_recall": 0.637449
},
"by_task": {
"sentiment": {
"macro_f1": 0.839938,
"accuracy": 0.830804,
"macro_precision": 0.866493,
"macro_recall": 0.869504
},
"emotion": {
"macro_f1": 0.394497,
"accuracy": 0.4655,
"macro_precision": 0.493638,
"macro_recall": 0.434793
},
"intent": {
"macro_f1": 0.582192,
"accuracy": 0.593667,
"macro_precision": 0.618523,
"macro_recall": 0.6263
},
"topic": {
"macro_f1": 0.421042,
"accuracy": 0.478636,
"macro_precision": 0.571964,
"macro_recall": 0.550762
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9479948282241821,
"accuracy": 0.9480000138282776,
"macro_precision": 0.9480941295623779,
"macro_recall": 0.9483811259269714
},
"imdb": {
"macro_f1": 0.9349526166915894,
"accuracy": 0.9350000023841858,
"macro_precision": 0.9349770545959473,
"macro_recall": 0.9349300265312195
},
"appreviews": {
"macro_f1": 0.9189493656158447,
"accuracy": 0.9190000295639038,
"macro_precision": 0.919572114944458,
"macro_recall": 0.9188587665557861
},
"yelpreviews": {
"macro_f1": 0.9509975910186768,
"accuracy": 0.9509999752044678,
"macro_precision": 0.9512209892272949,
"macro_recall": 0.9514611959457397
},
"rottentomatoes": {
"macro_f1": 0.8233464956283569,
"accuracy": 0.8245778679847717,
"macro_precision": 0.8338873386383057,
"macro_recall": 0.824577808380127
},
"financialphrasebank": {
"macro_f1": 0.4633880853652954,
"accuracy": 0.4072463810443878,
"macro_precision": 0.6112039089202881,
"macro_recall": 0.6388155817985535
},
"emotiondair": {
"macro_f1": 0.4404083490371704,
"accuracy": 0.5450000166893005,
"macro_precision": 0.5091676115989685,
"macro_recall": 0.480523020029068
},
"empathetic": {
"macro_f1": 0.34858500957489014,
"accuracy": 0.38600000739097595,
"macro_precision": 0.478109210729599,
"macro_recall": 0.3890635371208191
},
"banking77": {
"macro_f1": 0.6825442910194397,
"accuracy": 0.6769999861717224,
"macro_precision": 0.7381986379623413,
"macro_recall": 0.704035758972168
},
"biasframes_intent": {
"macro_f1": 0.5388701558113098,
"accuracy": 0.5789999961853027,
"macro_precision": 0.5808204412460327,
"macro_recall": 0.5598134994506836
},
"massive": {
"macro_f1": 0.52516108751297,
"accuracy": 0.5249999761581421,
"macro_precision": 0.5365509986877441,
"macro_recall": 0.6150498986244202
},
"agnews": {
"macro_f1": 0.7664446234703064,
"accuracy": 0.7710000276565552,
"macro_precision": 0.7729312181472778,
"macro_recall": 0.7728698253631592
},
"yahootopics": {
"macro_f1": 0.5731387138366699,
"accuracy": 0.6349999904632568,
"macro_precision": 0.6037620306015015,
"macro_recall": 0.5753264427185059
},
"trueteacher": {
"macro_f1": 0.3989500403404236,
"accuracy": 0.4729999899864197,
"macro_precision": 0.45366013050079346,
"macro_recall": 0.47728073596954346
},
"manifesto": {
"macro_f1": 0.259449303150177,
"accuracy": 0.42500001192092896,
"macro_precision": 0.2871650159358978,
"macro_recall": 0.3101934492588043
},
"capsotu": {
"macro_f1": 0.5617057085037231,
"accuracy": 0.6050000190734863,
"macro_precision": 0.5457377433776855,
"macro_recall": 0.6577588319778442
},
"biasframes_offensive": {
"macro_f1": 0.46127596497535706,
"accuracy": 0.5669999718666077,
"macro_precision": 0.538241982460022,
"macro_recall": 0.5169197916984558
},
"biasframes_sex": {
"macro_f1": 0.12777671217918396,
"accuracy": 0.12800000607967377,
"macro_precision": 0.5167240500450134,
"macro_recall": 0.5209751725196838
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.49887073040008545,
"accuracy": 0.5659999847412109,
"macro_precision": 0.6782971024513245,
"macro_recall": 0.5759053230285645
},
"wikitoxic_obscene": {
"macro_f1": 0.43688660860061646,
"accuracy": 0.4959999918937683,
"macro_precision": 0.6837161183357239,
"macro_recall": 0.5643137693405151
},
"wikitoxic_threat": {
"macro_f1": 0.11035144329071045,
"accuracy": 0.11100000143051147,
"macro_precision": 0.5085204243659973,
"macro_recall": 0.5133605599403381
},
"wikitoxic_insult": {
"macro_f1": 0.43661725521087646,
"accuracy": 0.4880000054836273,
"macro_precision": 0.7028484344482422,
"macro_recall": 0.5734736919403076
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "e5-base-v2",
"model_type": "embedding",
"params": "110M",
"revision": "unknown",
"url": "https://huggingface.co/e5-base-v2"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.596597,
"accuracy": 0.621787,
"macro_precision": 0.644211,
"macro_recall": 0.653966
},
"by_task": {
"sentiment": {
"macro_f1": 0.833574,
"accuracy": 0.827718,
"macro_precision": 0.845933,
"macro_recall": 0.857147
},
"emotion": {
"macro_f1": 0.401925,
"accuracy": 0.458,
"macro_precision": 0.476025,
"macro_recall": 0.431148
},
"intent": {
"macro_f1": 0.563119,
"accuracy": 0.575,
"macro_precision": 0.589318,
"macro_recall": 0.611157
},
"topic": {
"macro_f1": 0.511863,
"accuracy": 0.552,
"macro_precision": 0.57973,
"macro_recall": 0.595328
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9257502555847168,
"accuracy": 0.9259999990463257,
"macro_precision": 0.9282991886138916,
"macro_recall": 0.9251173734664917
},
"imdb": {
"macro_f1": 0.8977016806602478,
"accuracy": 0.8980000019073486,
"macro_precision": 0.8996210098266602,
"macro_recall": 0.8972024917602539
},
"appreviews": {
"macro_f1": 0.926961362361908,
"accuracy": 0.9269999861717224,
"macro_precision": 0.9274652600288391,
"macro_recall": 0.9268753528594971
},
"yelpreviews": {
"macro_f1": 0.9509889483451843,
"accuracy": 0.9509999752044678,
"macro_precision": 0.9531490802764893,
"macro_recall": 0.952033519744873
},
"rottentomatoes": {
"macro_f1": 0.8367706537246704,
"accuracy": 0.8367729783058167,
"macro_precision": 0.8367919921875,
"macro_recall": 0.8367729783058167
},
"financialphrasebank": {
"macro_f1": 0.46327078342437744,
"accuracy": 0.4275362193584442,
"macro_precision": 0.5302709341049194,
"macro_recall": 0.6048827171325684
},
"emotiondair": {
"macro_f1": 0.4324241578578949,
"accuracy": 0.5059999823570251,
"macro_precision": 0.45224061608314514,
"macro_recall": 0.45467713475227356
},
"empathetic": {
"macro_f1": 0.3714253604412079,
"accuracy": 0.4099999964237213,
"macro_precision": 0.4998090863227844,
"macro_recall": 0.40761905908584595
},
"banking77": {
"macro_f1": 0.6155748963356018,
"accuracy": 0.6200000047683716,
"macro_precision": 0.6523025631904602,
"macro_recall": 0.6465913653373718
},
"biasframes_intent": {
"macro_f1": 0.6012471318244934,
"accuracy": 0.6190000176429749,
"macro_precision": 0.6202475428581238,
"macro_recall": 0.6064669489860535
},
"massive": {
"macro_f1": 0.4725361466407776,
"accuracy": 0.4860000014305115,
"macro_precision": 0.49540361762046814,
"macro_recall": 0.5804132223129272
},
"agnews": {
"macro_f1": 0.7608605623245239,
"accuracy": 0.7680000066757202,
"macro_precision": 0.7769278883934021,
"macro_recall": 0.7691781520843506
},
"yahootopics": {
"macro_f1": 0.5515064597129822,
"accuracy": 0.6179999709129333,
"macro_precision": 0.5705708861351013,
"macro_recall": 0.5562304258346558
},
"trueteacher": {
"macro_f1": 0.43589115142822266,
"accuracy": 0.4869999885559082,
"macro_precision": 0.4850466251373291,
"macro_recall": 0.4906826615333557
},
"manifesto": {
"macro_f1": 0.20859912037849426,
"accuracy": 0.3569999933242798,
"macro_precision": 0.24746529757976532,
"macro_recall": 0.24918732047080994
},
"capsotu": {
"macro_f1": 0.5291017293930054,
"accuracy": 0.5889999866485596,
"macro_precision": 0.5093950629234314,
"macro_recall": 0.6365930438041687
},
"biasframes_offensive": {
"macro_f1": 0.5871238708496094,
"accuracy": 0.6169999837875366,
"macro_precision": 0.6074733734130859,
"macro_recall": 0.591576337814331
},
"biasframes_sex": {
"macro_f1": 0.19746507704257965,
"accuracy": 0.2070000022649765,
"macro_precision": 0.5024656653404236,
"macro_recall": 0.5062779188156128
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.671910285949707,
"accuracy": 0.6790000200271606,
"macro_precision": 0.7040552496910095,
"macro_recall": 0.6832839250564575
},
"wikitoxic_obscene": {
"macro_f1": 0.6416641473770142,
"accuracy": 0.6460000276565552,
"macro_precision": 0.7081863284111023,
"macro_recall": 0.6823936700820923
},
"wikitoxic_threat": {
"macro_f1": 0.30706092715263367,
"accuracy": 0.3610000014305115,
"macro_precision": 0.5256366729736328,
"macro_recall": 0.6332731246948242
},
"wikitoxic_insult": {
"macro_f1": 0.7393083572387695,
"accuracy": 0.7429999709129333,
"macro_precision": 0.7398092150688171,
"macro_recall": 0.7499332427978516
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "e5-large-v2",
"model_type": "embedding",
"params": "335M",
"revision": "unknown",
"url": "https://huggingface.co/e5-large-v2"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.597409,
"accuracy": 0.616721,
"macro_precision": 0.652853,
"macro_recall": 0.65146
},
"by_task": {
"sentiment": {
"macro_f1": 0.855361,
"accuracy": 0.843479,
"macro_precision": 0.877192,
"macro_recall": 0.880996
},
"emotion": {
"macro_f1": 0.412053,
"accuracy": 0.461,
"macro_precision": 0.474899,
"macro_recall": 0.460835
},
"intent": {
"macro_f1": 0.54584,
"accuracy": 0.539,
"macro_precision": 0.598506,
"macro_recall": 0.579836
},
"topic": {
"macro_f1": 0.504472,
"accuracy": 0.542545,
"macro_precision": 0.577664,
"macro_recall": 0.580451
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9428738355636597,
"accuracy": 0.9430000185966492,
"macro_precision": 0.9441698789596558,
"macro_recall": 0.9424151182174683
},
"imdb": {
"macro_f1": 0.9277734756469727,
"accuracy": 0.9279999732971191,
"macro_precision": 0.9300627708435059,
"macro_recall": 0.9271707534790039
},
"appreviews": {
"macro_f1": 0.9099823236465454,
"accuracy": 0.9100000262260437,
"macro_precision": 0.9106762409210205,
"macro_recall": 0.9101702570915222
},
"yelpreviews": {
"macro_f1": 0.9759995937347412,
"accuracy": 0.9760000109672546,
"macro_precision": 0.9763883352279663,
"macro_recall": 0.9765561819076538
},
"rottentomatoes": {
"macro_f1": 0.85456782579422,
"accuracy": 0.8545966148376465,
"macro_precision": 0.8548777103424072,
"macro_recall": 0.8545966148376465
},
"financialphrasebank": {
"macro_f1": 0.5209692120552063,
"accuracy": 0.4492753744125366,
"macro_precision": 0.6469757556915283,
"macro_recall": 0.6750686168670654
},
"emotiondair": {
"macro_f1": 0.44059354066848755,
"accuracy": 0.49000000953674316,
"macro_precision": 0.4662151336669922,
"macro_recall": 0.4994733929634094
},
"empathetic": {
"macro_f1": 0.3835121989250183,
"accuracy": 0.4320000112056732,
"macro_precision": 0.48358288407325745,
"macro_recall": 0.4221965968608856
},
"banking77": {
"macro_f1": 0.583658754825592,
"accuracy": 0.5630000233650208,
"macro_precision": 0.6998542547225952,
"macro_recall": 0.594748318195343
},
"biasframes_intent": {
"macro_f1": 0.5438522100448608,
"accuracy": 0.5440000295639038,
"macro_precision": 0.5488690137863159,
"macro_recall": 0.5485363602638245
},
"massive": {
"macro_f1": 0.5100078582763672,
"accuracy": 0.5099999904632568,
"macro_precision": 0.5467942953109741,
"macro_recall": 0.5962241888046265
},
"agnews": {
"macro_f1": 0.7865350246429443,
"accuracy": 0.7929999828338623,
"macro_precision": 0.801010012626648,
"macro_recall": 0.7923803925514221
},
"yahootopics": {
"macro_f1": 0.5175836682319641,
"accuracy": 0.5619999766349792,
"macro_precision": 0.5682903528213501,
"macro_recall": 0.5555293560028076
},
"trueteacher": {
"macro_f1": 0.46867427229881287,
"accuracy": 0.47999998927116394,
"macro_precision": 0.4799731969833374,
"macro_recall": 0.48182135820388794
},
"manifesto": {
"macro_f1": 0.21668392419815063,
"accuracy": 0.33899998664855957,
"macro_precision": 0.27807387709617615,
"macro_recall": 0.24636535346508026
},
"capsotu": {
"macro_f1": 0.504774808883667,
"accuracy": 0.5479999780654907,
"macro_precision": 0.5179288387298584,
"macro_recall": 0.5643118619918823
},
"biasframes_offensive": {
"macro_f1": 0.5198471546173096,
"accuracy": 0.5210000276565552,
"macro_precision": 0.5234423875808716,
"macro_recall": 0.523840069770813
},
"biasframes_sex": {
"macro_f1": 0.38923200964927673,
"accuracy": 0.49799999594688416,
"macro_precision": 0.5157051086425781,
"macro_recall": 0.5717475414276123
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6889909505844116,
"accuracy": 0.6949999928474426,
"macro_precision": 0.7192105650901794,
"macro_recall": 0.699086606502533
},
"wikitoxic_obscene": {
"macro_f1": 0.6598845720291138,
"accuracy": 0.6639999747276306,
"macro_precision": 0.7292996048927307,
"macro_recall": 0.7008911967277527
},
"wikitoxic_threat": {
"macro_f1": 0.28966307640075684,
"accuracy": 0.3330000042915344,
"macro_precision": 0.5286061763763428,
"macro_recall": 0.6403100490570068
},
"wikitoxic_insult": {
"macro_f1": 0.5073270797729492,
"accuracy": 0.5350000262260437,
"macro_precision": 0.6927686929702759,
"macro_recall": 0.6086787581443787
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "e5-mistral-7b-instruct",
"model_type": "embedding",
"params": "7B",
"revision": "unknown",
"url": "https://huggingface.co/e5-mistral-7b-instruct"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.575567,
"accuracy": 0.615418,
"macro_precision": 0.689792,
"macro_recall": 0.662082
},
"by_task": {
"sentiment": {
"macro_f1": 0.872693,
"accuracy": 0.864698,
"macro_precision": 0.882869,
"macro_recall": 0.893214
},
"emotion": {
"macro_f1": 0.499561,
"accuracy": 0.542,
"macro_precision": 0.56181,
"macro_recall": 0.538843
},
"intent": {
"macro_f1": 0.649362,
"accuracy": 0.665333,
"macro_precision": 0.674613,
"macro_recall": 0.685198
},
"topic": {
"macro_f1": 0.407192,
"accuracy": 0.479182,
"macro_precision": 0.611887,
"macro_recall": 0.552114
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9398496150970459,
"accuracy": 0.9399999976158142,
"macro_precision": 0.9414554238319397,
"macro_recall": 0.9393349885940552
},
"imdb": {
"macro_f1": 0.9142584204673767,
"accuracy": 0.9150000214576721,
"macro_precision": 0.9237684607505798,
"macro_recall": 0.9131993055343628
},
"appreviews": {
"macro_f1": 0.931954026222229,
"accuracy": 0.9319999814033508,
"macro_precision": 0.9326616525650024,
"macro_recall": 0.9318516254425049
},
"yelpreviews": {
"macro_f1": 0.9829979538917542,
"accuracy": 0.9829999804496765,
"macro_precision": 0.9830597639083862,
"macro_recall": 0.9833787679672241
},
"rottentomatoes": {
"macro_f1": 0.8442332744598389,
"accuracy": 0.8442776799201965,
"macro_precision": 0.8446707725524902,
"macro_recall": 0.8442776203155518
},
"financialphrasebank": {
"macro_f1": 0.6228639483451843,
"accuracy": 0.573913037776947,
"macro_precision": 0.671596109867096,
"macro_recall": 0.7472397685050964
},
"emotiondair": {
"macro_f1": 0.49798208475112915,
"accuracy": 0.5450000166893005,
"macro_precision": 0.5303964614868164,
"macro_recall": 0.5377311110496521
},
"empathetic": {
"macro_f1": 0.5011402368545532,
"accuracy": 0.5389999747276306,
"macro_precision": 0.5932239294052124,
"macro_recall": 0.5399547219276428
},
"banking77": {
"macro_f1": 0.6527521014213562,
"accuracy": 0.6499999761581421,
"macro_precision": 0.6966351866722107,
"macro_recall": 0.686503529548645
},
"biasframes_intent": {
"macro_f1": 0.6684172749519348,
"accuracy": 0.6779999732971191,
"macro_precision": 0.6805517673492432,
"macro_recall": 0.6689438223838806
},
"massive": {
"macro_f1": 0.6269175410270691,
"accuracy": 0.6679999828338623,
"macro_precision": 0.6466506719589233,
"macro_recall": 0.7001461982727051
},
"agnews": {
"macro_f1": 0.7702938318252563,
"accuracy": 0.7799999713897705,
"macro_precision": 0.8165492415428162,
"macro_recall": 0.7775763273239136
},
"yahootopics": {
"macro_f1": 0.6368180513381958,
"accuracy": 0.7120000123977661,
"macro_precision": 0.6586915254592896,
"macro_recall": 0.6391779184341431
},
"trueteacher": {
"macro_f1": 0.4197064936161041,
"accuracy": 0.48100000619888306,
"macro_precision": 0.4615013599395752,
"macro_recall": 0.4771687090396881
},
"manifesto": {
"macro_f1": 0.30002036690711975,
"accuracy": 0.5,
"macro_precision": 0.3562106490135193,
"macro_recall": 0.3193519115447998
},
"capsotu": {
"macro_f1": 0.6181511282920837,
"accuracy": 0.6190000176429749,
"macro_precision": 0.6203116774559021,
"macro_recall": 0.6929008960723877
},
"biasframes_offensive": {
"macro_f1": 0.46933332085609436,
"accuracy": 0.6019999980926514,
"macro_precision": 0.675881028175354,
"macro_recall": 0.5447821021080017
},
"biasframes_sex": {
"macro_f1": 0.07987284660339355,
"accuracy": 0.08100000023841858,
"macro_precision": 0.5296826958656311,
"macro_recall": 0.5122080445289612
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.3689675033092499,
"accuracy": 0.5059999823570251,
"macro_precision": 0.724402904510498,
"macro_recall": 0.5184664726257324
},
"wikitoxic_obscene": {
"macro_f1": 0.3256414532661438,
"accuracy": 0.43299999833106995,
"macro_precision": 0.6593969464302063,
"macro_recall": 0.5122124552726746
},
"wikitoxic_threat": {
"macro_f1": 0.09495565295219421,
"accuracy": 0.0949999988079071,
"macro_precision": 0.5231822729110718,
"macro_recall": 0.5266736149787903
},
"wikitoxic_insult": {
"macro_f1": 0.39535343647003174,
"accuracy": 0.4620000123977661,
"macro_precision": 0.7049510478973389,
"macro_recall": 0.5527304410934448
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "gte-base-en-v1.5",
"model_type": "embedding",
"params": "137M",
"revision": "unknown",
"url": "https://huggingface.co/gte-base-en-v1.5"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.584158,
"accuracy": 0.607613,
"macro_precision": 0.649384,
"macro_recall": 0.653511
},
"by_task": {
"sentiment": {
"macro_f1": 0.825544,
"accuracy": 0.817247,
"macro_precision": 0.857819,
"macro_recall": 0.85287
},
"emotion": {
"macro_f1": 0.371917,
"accuracy": 0.429,
"macro_precision": 0.399497,
"macro_recall": 0.40653
},
"intent": {
"macro_f1": 0.587941,
"accuracy": 0.595667,
"macro_precision": 0.608567,
"macro_recall": 0.628405
},
"topic": {
"macro_f1": 0.490051,
"accuracy": 0.529,
"macro_precision": 0.592258,
"macro_recall": 0.596523
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.8979170322418213,
"accuracy": 0.8989999890327454,
"macro_precision": 0.9099421501159668,
"macro_recall": 0.8969283103942871
},
"imdb": {
"macro_f1": 0.8459572792053223,
"accuracy": 0.8500000238418579,
"macro_precision": 0.8798589706420898,
"macro_recall": 0.8463601469993591
},
"appreviews": {
"macro_f1": 0.9289840459823608,
"accuracy": 0.9290000200271606,
"macro_precision": 0.9291197061538696,
"macro_recall": 0.9289394617080688
},
"yelpreviews": {
"macro_f1": 0.9719838500022888,
"accuracy": 0.972000002861023,
"macro_precision": 0.9719424247741699,
"macro_recall": 0.9720330834388733
},
"rottentomatoes": {
"macro_f1": 0.8423840999603271,
"accuracy": 0.8433395624160767,
"macro_precision": 0.8518723249435425,
"macro_recall": 0.8433395624160767
},
"financialphrasebank": {
"macro_f1": 0.46603572368621826,
"accuracy": 0.4101449251174927,
"macro_precision": 0.6041796803474426,
"macro_recall": 0.6296189427375793
},
"emotiondair": {
"macro_f1": 0.42110711336135864,
"accuracy": 0.49900001287460327,
"macro_precision": 0.4316229522228241,
"macro_recall": 0.4499507546424866
},
"empathetic": {
"macro_f1": 0.322726309299469,
"accuracy": 0.35899999737739563,
"macro_precision": 0.36737167835235596,
"macro_recall": 0.3631083071231842
},
"banking77": {
"macro_f1": 0.6550149321556091,
"accuracy": 0.6489999890327454,
"macro_precision": 0.6832579374313354,
"macro_recall": 0.6764678359031677
},
"biasframes_intent": {
"macro_f1": 0.596967339515686,
"accuracy": 0.597000002861023,
"macro_precision": 0.601468563079834,
"macro_recall": 0.6011562347412109
},
"massive": {
"macro_f1": 0.5118401050567627,
"accuracy": 0.5410000085830688,
"macro_precision": 0.5409731268882751,
"macro_recall": 0.6075910925865173
},
"agnews": {
"macro_f1": 0.7503072619438171,
"accuracy": 0.7580000162124634,
"macro_precision": 0.7593144774436951,
"macro_recall": 0.7591022253036499
},
"yahootopics": {
"macro_f1": 0.559667706489563,
"accuracy": 0.6200000047683716,
"macro_precision": 0.5773304104804993,
"macro_recall": 0.558665931224823
},
"trueteacher": {
"macro_f1": 0.34951454401016235,
"accuracy": 0.4830000102519989,
"macro_precision": 0.4269310235977173,
"macro_recall": 0.48850634694099426
},
"manifesto": {
"macro_f1": 0.21299287676811218,
"accuracy": 0.3580000102519989,
"macro_precision": 0.2409083992242813,
"macro_recall": 0.2498115450143814
},
"capsotu": {
"macro_f1": 0.5663331747055054,
"accuracy": 0.5899999737739563,
"macro_precision": 0.5791536569595337,
"macro_recall": 0.6441802382469177
},
"biasframes_offensive": {
"macro_f1": 0.6292864084243774,
"accuracy": 0.6399999856948853,
"macro_precision": 0.6317906379699707,
"macro_recall": 0.6285054683685303
},
"biasframes_sex": {
"macro_f1": 0.10084804892539978,
"accuracy": 0.10100000351667404,
"macro_precision": 0.5187318325042725,
"macro_recall": 0.5147339105606079
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6507130861282349,
"accuracy": 0.6710000038146973,
"macro_precision": 0.7393791675567627,
"macro_recall": 0.6777241230010986
},
"wikitoxic_obscene": {
"macro_f1": 0.7413533926010132,
"accuracy": 0.7419999837875366,
"macro_precision": 0.7833998203277588,
"macro_recall": 0.77093505859375
},
"wikitoxic_threat": {
"macro_f1": 0.21763062477111816,
"accuracy": 0.23499999940395355,
"macro_precision": 0.5240511894226074,
"macro_recall": 0.5890547633171082
},
"wikitoxic_insult": {
"macro_f1": 0.6119153499603271,
"accuracy": 0.6209999918937683,
"macro_precision": 0.7338423728942871,
"macro_recall": 0.6805330514907837
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "gte-large-en-v1.5",
"model_type": "embedding",
"params": "434M",
"revision": "unknown",
"url": "https://huggingface.co/gte-large-en-v1.5"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.617418,
"accuracy": 0.637108,
"macro_precision": 0.668635,
"macro_recall": 0.680452
},
"by_task": {
"sentiment": {
"macro_f1": 0.849181,
"accuracy": 0.846062,
"macro_precision": 0.864922,
"macro_recall": 0.882493
},
"emotion": {
"macro_f1": 0.372492,
"accuracy": 0.407,
"macro_precision": 0.454332,
"macro_recall": 0.407355
},
"intent": {
"macro_f1": 0.589794,
"accuracy": 0.592667,
"macro_precision": 0.612692,
"macro_recall": 0.625439
},
"topic": {
"macro_f1": 0.543067,
"accuracy": 0.577091,
"macro_precision": 0.615791,
"macro_recall": 0.634906
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9519808292388916,
"accuracy": 0.9520000219345093,
"macro_precision": 0.9519065618515015,
"macro_recall": 0.9521236419677734
},
"imdb": {
"macro_f1": 0.9419607520103455,
"accuracy": 0.9419999718666077,
"macro_precision": 0.9419607520103455,
"macro_recall": 0.9419607520103455
},
"appreviews": {
"macro_f1": 0.9069663882255554,
"accuracy": 0.9070000052452087,
"macro_precision": 0.9080474376678467,
"macro_recall": 0.9072100520133972
},
"yelpreviews": {
"macro_f1": 0.9308948516845703,
"accuracy": 0.9309999942779541,
"macro_precision": 0.9370794296264648,
"macro_recall": 0.9326444864273071
},
"rottentomatoes": {
"macro_f1": 0.873044490814209,
"accuracy": 0.8733583688735962,
"macro_precision": 0.877086877822876,
"macro_recall": 0.8733583688735962
},
"financialphrasebank": {
"macro_f1": 0.4902401566505432,
"accuracy": 0.47101449966430664,
"macro_precision": 0.5734497308731079,
"macro_recall": 0.6876605749130249
},
"emotiondair": {
"macro_f1": 0.4007812738418579,
"accuracy": 0.453000009059906,
"macro_precision": 0.4739866852760315,
"macro_recall": 0.45103153586387634
},
"empathetic": {
"macro_f1": 0.34420278668403625,
"accuracy": 0.3610000014305115,
"macro_precision": 0.43467679619789124,
"macro_recall": 0.36367812752723694
},
"banking77": {
"macro_f1": 0.6315518617630005,
"accuracy": 0.6320000290870667,
"macro_precision": 0.6847038269042969,
"macro_recall": 0.6546837687492371
},
"biasframes_intent": {
"macro_f1": 0.5647697448730469,
"accuracy": 0.5649999976158142,
"macro_precision": 0.566199541091919,
"macro_recall": 0.5665242671966553
},
"massive": {
"macro_f1": 0.5730605721473694,
"accuracy": 0.5809999704360962,
"macro_precision": 0.5871738195419312,
"macro_recall": 0.6551083922386169
},
"agnews": {
"macro_f1": 0.7396283149719238,
"accuracy": 0.75,
"macro_precision": 0.7764260768890381,
"macro_recall": 0.7503991723060608
},
"yahootopics": {
"macro_f1": 0.5612474679946899,
"accuracy": 0.6150000095367432,
"macro_precision": 0.5916916131973267,
"macro_recall": 0.5538436770439148
},
"trueteacher": {
"macro_f1": 0.40464848279953003,
"accuracy": 0.44999998807907104,
"macro_precision": 0.4316384792327881,
"macro_recall": 0.45337727665901184
},
"manifesto": {
"macro_f1": 0.2757111191749573,
"accuracy": 0.3889999985694885,
"macro_precision": 0.35540515184402466,
"macro_recall": 0.3327782452106476
},
"capsotu": {
"macro_f1": 0.5492952466011047,
"accuracy": 0.5899999737739563,
"macro_precision": 0.5632104873657227,
"macro_recall": 0.6364038586616516
},
"biasframes_offensive": {
"macro_f1": 0.46699607372283936,
"accuracy": 0.47699999809265137,
"macro_precision": 0.5052019357681274,
"macro_recall": 0.5044152736663818
},
"biasframes_sex": {
"macro_f1": 0.20775750279426575,
"accuracy": 0.2160000056028366,
"macro_precision": 0.5308035612106323,
"macro_recall": 0.5757741928100586
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.8219228982925415,
"accuracy": 0.8240000009536743,
"macro_precision": 0.8474968075752258,
"macro_recall": 0.8273673057556152
},
"wikitoxic_obscene": {
"macro_f1": 0.8159528970718384,
"accuracy": 0.8159999847412109,
"macro_precision": 0.8326195478439331,
"macro_recall": 0.8358572125434875
},
"wikitoxic_threat": {
"macro_f1": 0.3788304030895233,
"accuracy": 0.4690000116825104,
"macro_precision": 0.5382608771324158,
"macro_recall": 0.7222803235054016
},
"wikitoxic_insult": {
"macro_f1": 0.7517458200454712,
"accuracy": 0.7519999742507935,
"macro_precision": 0.8009515404701233,
"macro_recall": 0.7914656400680542
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "gte-modernbert-base",
"model_type": "embedding",
"params": "149M",
"revision": "unknown",
"url": "https://huggingface.co/gte-modernbert-base"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.58639,
"accuracy": 0.609029,
"macro_precision": 0.655794,
"macro_recall": 0.6503
},
"by_task": {
"sentiment": {
"macro_f1": 0.867424,
"accuracy": 0.866605,
"macro_precision": 0.875204,
"macro_recall": 0.870719
},
"emotion": {
"macro_f1": 0.415074,
"accuracy": 0.449,
"macro_precision": 0.463846,
"macro_recall": 0.446823
},
"intent": {
"macro_f1": 0.624427,
"accuracy": 0.635667,
"macro_precision": 0.645295,
"macro_recall": 0.659486
},
"topic": {
"macro_f1": 0.453874,
"accuracy": 0.490364,
"macro_precision": 0.573878,
"macro_recall": 0.564561
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9518982172012329,
"accuracy": 0.9520000219345093,
"macro_precision": 0.9531158208847046,
"macro_recall": 0.9514471292495728
},
"imdb": {
"macro_f1": 0.9087033867835999,
"accuracy": 0.9089999794960022,
"macro_precision": 0.9110391139984131,
"macro_recall": 0.9081318974494934
},
"appreviews": {
"macro_f1": 0.9229937791824341,
"accuracy": 0.9229999780654907,
"macro_precision": 0.923002302646637,
"macro_recall": 0.9229871034622192
},
"yelpreviews": {
"macro_f1": 0.9589908123016357,
"accuracy": 0.9589999914169312,
"macro_precision": 0.9611742496490479,
"macro_recall": 0.9600390195846558
},
"rottentomatoes": {
"macro_f1": 0.8174377679824829,
"accuracy": 0.8189493417739868,
"macro_precision": 0.8298747539520264,
"macro_recall": 0.8189493417739868
},
"financialphrasebank": {
"macro_f1": 0.6445218324661255,
"accuracy": 0.6376811861991882,
"macro_precision": 0.6730180978775024,
"macro_recall": 0.6627617478370667
},
"emotiondair": {
"macro_f1": 0.4404637813568115,
"accuracy": 0.5,
"macro_precision": 0.4772695004940033,
"macro_recall": 0.49262484908103943
},
"empathetic": {
"macro_f1": 0.3896840214729309,
"accuracy": 0.39800000190734863,
"macro_precision": 0.450422465801239,
"macro_recall": 0.4010213613510132
},
"banking77": {
"macro_f1": 0.6365822553634644,
"accuracy": 0.6320000290870667,
"macro_precision": 0.6876528263092041,
"macro_recall": 0.661003589630127
},
"biasframes_intent": {
"macro_f1": 0.6256166696548462,
"accuracy": 0.6259999871253967,
"macro_precision": 0.6264742016792297,
"macro_recall": 0.6271906495094299
},
"massive": {
"macro_f1": 0.6110821962356567,
"accuracy": 0.6489999890327454,
"macro_precision": 0.6217591762542725,
"macro_recall": 0.690264105796814
},
"agnews": {
"macro_f1": 0.7550400495529175,
"accuracy": 0.7580000162124634,
"macro_precision": 0.76431804895401,
"macro_recall": 0.7619649171829224
},
"yahootopics": {
"macro_f1": 0.5372447967529297,
"accuracy": 0.6029999852180481,
"macro_precision": 0.5605465769767761,
"macro_recall": 0.5449396967887878
},
"trueteacher": {
"macro_f1": 0.47455811500549316,
"accuracy": 0.4749999940395355,
"macro_precision": 0.4752989411354065,
"macro_recall": 0.47541648149490356
},
"manifesto": {
"macro_f1": 0.24006304144859314,
"accuracy": 0.38499999046325684,
"macro_precision": 0.2887086868286133,
"macro_recall": 0.287731796503067
},
"capsotu": {
"macro_f1": 0.4834834933280945,
"accuracy": 0.5180000066757202,
"macro_precision": 0.49493494629859924,
"macro_recall": 0.6106833815574646
},
"biasframes_offensive": {
"macro_f1": 0.5928753018379211,
"accuracy": 0.6320000290870667,
"macro_precision": 0.6301878690719604,
"macro_recall": 0.6012561321258545
},
"biasframes_sex": {
"macro_f1": 0.1315796822309494,
"accuracy": 0.13199999928474426,
"macro_precision": 0.511141300201416,
"macro_recall": 0.5150084495544434
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.4754565358161926,
"accuracy": 0.5519999861717224,
"macro_precision": 0.6656557321548462,
"macro_recall": 0.562312126159668
},
"wikitoxic_obscene": {
"macro_f1": 0.5205207467079163,
"accuracy": 0.546999990940094,
"macro_precision": 0.6655195951461792,
"macro_recall": 0.6017239689826965
},
"wikitoxic_threat": {
"macro_f1": 0.15296727418899536,
"accuracy": 0.15700000524520874,
"macro_precision": 0.5248026847839355,
"macro_recall": 0.5591003894805908
},
"wikitoxic_insult": {
"macro_f1": 0.6288232803344727,
"accuracy": 0.6349999904632568,
"macro_precision": 0.73154616355896,
"macro_recall": 0.6900323629379272
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "Llama-3.2-3B-Instruct",
"model_type": "llm",
"params": "3.2B",
"revision": "unknown",
"url": "https://huggingface.co/Llama-3.2-3B-Instruct"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.430162,
"accuracy": 0.463103,
"macro_precision": 0.505333,
"macro_recall": 0.477962
},
"by_task": {
"sentiment": {
"macro_f1": 0.455847,
"accuracy": 0.48471,
"macro_precision": 0.495721,
"macro_recall": 0.493113
},
"emotion": {
"macro_f1": 0.347928,
"accuracy": 0.4305,
"macro_precision": 0.446685,
"macro_recall": 0.370443
},
"intent": {
"macro_f1": 0.408329,
"accuracy": 0.415333,
"macro_precision": 0.492138,
"macro_recall": 0.438683
},
"topic": {
"macro_f1": 0.437059,
"accuracy": 0.470273,
"macro_precision": 0.524838,
"macro_recall": 0.499959
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.5109429359436035,
"accuracy": 0.5419999957084656,
"macro_precision": 0.5463839769363403,
"macro_recall": 0.5358102321624756
},
"imdb": {
"macro_f1": 0.5104212760925293,
"accuracy": 0.5370000004768372,
"macro_precision": 0.538789689540863,
"macro_recall": 0.5313011407852173
},
"appreviews": {
"macro_f1": 0.5057665705680847,
"accuracy": 0.5410000085830688,
"macro_precision": 0.5537739992141724,
"macro_recall": 0.5388984680175781
},
"yelpreviews": {
"macro_f1": 0.4381364583969116,
"accuracy": 0.47699999809265137,
"macro_precision": 0.4606666564941406,
"macro_recall": 0.4704800248146057
},
"rottentomatoes": {
"macro_f1": 0.4867360293865204,
"accuracy": 0.5112570524215698,
"macro_precision": 0.5139164328575134,
"macro_recall": 0.5112570524215698
},
"financialphrasebank": {
"macro_f1": 0.2830798029899597,
"accuracy": 0.30000001192092896,
"macro_precision": 0.3607962727546692,
"macro_recall": 0.37093403935432434
},
"emotiondair": {
"macro_f1": 0.32957401871681213,
"accuracy": 0.46299999952316284,
"macro_precision": 0.4165148138999939,
"macro_recall": 0.3467283248901367
},
"empathetic": {
"macro_f1": 0.36628174781799316,
"accuracy": 0.39800000190734863,
"macro_precision": 0.47685420513153076,
"macro_recall": 0.39415672421455383
},
"banking77": {
"macro_f1": 0.39238879084587097,
"accuracy": 0.40799999237060547,
"macro_precision": 0.5394270420074463,
"macro_recall": 0.4355233311653137
},
"biasframes_intent": {
"macro_f1": 0.46464645862579346,
"accuracy": 0.4699999988079071,
"macro_precision": 0.47887909412384033,
"macro_recall": 0.48037463426589966
},
"massive": {
"macro_f1": 0.3679518401622772,
"accuracy": 0.36800000071525574,
"macro_precision": 0.45810678601264954,
"macro_recall": 0.4001505970954895
},
"agnews": {
"macro_f1": 0.6745220422744751,
"accuracy": 0.6769999861717224,
"macro_precision": 0.7280982732772827,
"macro_recall": 0.6772974729537964
},
"yahootopics": {
"macro_f1": 0.4614385962486267,
"accuracy": 0.5040000081062317,
"macro_precision": 0.511243462562561,
"macro_recall": 0.45707938075065613
},
"trueteacher": {
"macro_f1": 0.4631797969341278,
"accuracy": 0.4970000088214874,
"macro_precision": 0.49218320846557617,
"macro_recall": 0.49405914545059204
},
"manifesto": {
"macro_f1": 0.10245990008115768,
"accuracy": 0.16899999976158142,
"macro_precision": 0.23749299347400665,
"macro_recall": 0.11978432536125183
},
"capsotu": {
"macro_f1": 0.4401363432407379,
"accuracy": 0.4970000088214874,
"macro_precision": 0.4854896664619446,
"macro_recall": 0.4804648756980896
},
"biasframes_offensive": {
"macro_f1": 0.5662714242935181,
"accuracy": 0.6029999852180481,
"macro_precision": 0.5913103818893433,
"macro_recall": 0.5743184685707092
},
"biasframes_sex": {
"macro_f1": 0.29700398445129395,
"accuracy": 0.34599998593330383,
"macro_precision": 0.5038384795188904,
"macro_recall": 0.5153378844261169
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5338513255119324,
"accuracy": 0.5550000071525574,
"macro_precision": 0.5765066146850586,
"macro_recall": 0.5609171986579895
},
"wikitoxic_obscene": {
"macro_f1": 0.5200477242469788,
"accuracy": 0.5299999713897705,
"macro_precision": 0.5842834711074829,
"macro_recall": 0.5689176321029663
},
"wikitoxic_threat": {
"macro_f1": 0.2524811029434204,
"accuracy": 0.29100000858306885,
"macro_precision": 0.4998076260089874,
"macro_recall": 0.4990966320037842
},
"wikitoxic_insult": {
"macro_f1": 0.49625441431999207,
"accuracy": 0.5040000081062317,
"macro_precision": 0.5629687905311584,
"macro_recall": 0.5522796511650085
}
}
  }
}

{
  "schema_version": "1.0",
  "model": {
"name": "Mistral-Nemo-Instruct-2407",
"model_type": "llm",
"params": "12.2B",
"revision": "unknown",
"url": "https://huggingface.co/Mistral-Nemo-Instruct-2407"
  },
  "evaluation": {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
  },
  "results": {
"overall": {
"macro_f1": 0.669733,
"accuracy": 0.712022,
"macro_precision": 0.726725,
"macro_recall": 0.693765
},
"by_task": {
"sentiment": {
"macro_f1": 0.841272,
"accuracy": 0.846579,
"macro_precision": 0.887509,
"macro_recall": 0.860019
},
"emotion": {
"macro_f1": 0.36281,
"accuracy": 0.4355,
"macro_precision": 0.474136,
"macro_recall": 0.369017
},
"intent": {
"macro_f1": 0.455073,
"accuracy": 0.489667,
"macro_precision": 0.561242,
"macro_recall": 0.489979
},
"topic": {
"macro_f1": 0.690513,
"accuracy": 0.749545,
"macro_precision": 0.730082,
"macro_recall": 0.717704
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9440000057220459,
"accuracy": 0.9440000057220459,
"macro_precision": 0.9446386098861694,
"macro_recall": 0.9446386098861694
},
"imdb": {
"macro_f1": 0.9489974975585938,
"accuracy": 0.9490000009536743,
"macro_precision": 0.9502403736114502,
"macro_recall": 0.9498240947723389
},
"appreviews": {
"macro_f1": 0.8988128900527954,
"accuracy": 0.8989999890327454,
"macro_precision": 0.9029363393783569,
"macro_recall": 0.8994015455245972
},
"yelpreviews": {
"macro_f1": 0.9679994583129883,
"accuracy": 0.9679999947547913,
"macro_precision": 0.9689841270446777,
"macro_recall": 0.9687588810920715
},
"rottentomatoes": {
"macro_f1": 0.539020836353302,
"accuracy": 0.6078799366950989,
"macro_precision": 0.76802659034729,
"macro_recall": 0.6078799366950989
},
"financialphrasebank": {
"macro_f1": 0.748801052570343,
"accuracy": 0.7115942239761353,
"macro_precision": 0.7902305722236633,
"macro_recall": 0.7896091938018799
},
"emotiondair": {
"macro_f1": 0.4354870319366455,
"accuracy": 0.5529999732971191,
"macro_precision": 0.5459252595901489,
"macro_recall": 0.4336623549461365
},
"empathetic": {
"macro_f1": 0.2901332378387451,
"accuracy": 0.3179999887943268,
"macro_precision": 0.40234678983688354,
"macro_recall": 0.3043708801269531
},
"banking77": {
"macro_f1": 0.3519081473350525,
"accuracy": 0.3880000114440918,
"macro_precision": 0.44835615158081055,
"macro_recall": 0.4029290974140167
},
"biasframes_intent": {
"macro_f1": 0.6547112464904785,
"accuracy": 0.6930000185966492,
"macro_precision": 0.761665940284729,
"macro_recall": 0.6715710163116455
},
"massive": {
"macro_f1": 0.35859915614128113,
"accuracy": 0.3880000114440918,
"macro_precision": 0.47370296716690063,
"macro_recall": 0.3954383432865143
},
"agnews": {
"macro_f1": 0.8372315168380737,
"accuracy": 0.8360000252723694,
"macro_precision": 0.8508498668670654,
"macro_recall": 0.8379960060119629
},
"yahootopics": {
"macro_f1": 0.5907819867134094,
"accuracy": 0.6549999713897705,
"macro_precision": 0.6396505236625671,
"macro_recall": 0.5971412062644958
},
"trueteacher": {
"macro_f1": 0.409269243478775,
"accuracy": 0.5370000004768372,
"macro_precision": 0.7002339959144592,
"macro_recall": 0.531496524810791
},
"manifesto": {
"macro_f1": 0.1788283735513687,
"accuracy": 0.2630000114440918,
"macro_precision": 0.24893233180046082,
"macro_recall": 0.20043033361434937
},
"capsotu": {
"macro_f1": 0.5104762315750122,
"accuracy": 0.6010000109672546,
"macro_precision": 0.6091029644012451,
"macro_recall": 0.5213670134544373
},
"biasframes_offensive": {
"macro_f1": 0.7859442234039307,
"accuracy": 0.7879999876022339,
"macro_precision": 0.7849413156509399,
"macro_recall": 0.7890359163284302
},
"biasframes_sex": {
"macro_f1": 0.8135557174682617,
"accuracy": 0.9549999833106995,
"macro_precision": 0.787163257598877,
"macro_recall": 0.8466761708259583
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.8477563858032227,
"accuracy": 0.8479999899864197,
"macro_precision": 0.8483136892318726,
"macro_recall": 0.8475329279899597
},
"wikitoxic_obscene": {
"macro_f1": 0.911335825920105,
"accuracy": 0.9129999876022339,
"macro_precision": 0.9089280366897583,
"macro_recall": 0.915145754814148
},
"wikitoxic_threat": {
"macro_f1": 0.8293161392211914,
"accuracy": 0.9629999995231628,
"macro_precision": 0.7719568014144897,
"macro_recall": 0.9264454245567322
},
"wikitoxic_insult": {
"macro_f1": 0.881150484085083,
"accuracy": 0.8859999775886536,
"macro_precision": 0.8808333873748779,
"macro_recall": 0.8814754486083984
}
}
} |
1.0 | {
"name": "Phi-4-mini-instruct",
"model_type": "llm",
"params": "3.8B",
"revision": "unknown",
"url": "https://huggingface.co/Phi-4-mini-instruct"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.430866,
"accuracy": 0.46549,
"macro_precision": 0.560353,
"macro_recall": 0.496716
},
"by_task": {
"sentiment": {
"macro_f1": 0.490137,
"accuracy": 0.53863,
"macro_precision": 0.639548,
"macro_recall": 0.56505
},
"emotion": {
"macro_f1": 0.300248,
"accuracy": 0.3265,
"macro_precision": 0.397343,
"macro_recall": 0.318852
},
"intent": {
"macro_f1": 0.3691,
"accuracy": 0.391333,
"macro_precision": 0.456704,
"macro_recall": 0.398316
},
"topic": {
"macro_f1": 0.43913,
"accuracy": 0.471091,
"macro_precision": 0.575062,
"macro_recall": 0.518619
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.5550732612609863,
"accuracy": 0.6150000095367432,
"macro_precision": 0.7121282815933228,
"macro_recall": 0.6058675646781921
},
"imdb": {
"macro_f1": 0.5613990426063538,
"accuracy": 0.6129999756813049,
"macro_precision": 0.6850106120109558,
"macro_recall": 0.6044906377792358
},
"appreviews": {
"macro_f1": 0.5450122356414795,
"accuracy": 0.6029999852180481,
"macro_precision": 0.6997272968292236,
"macro_recall": 0.6001824140548706
},
"yelpreviews": {
"macro_f1": 0.5233877301216125,
"accuracy": 0.597000002861023,
"macro_precision": 0.706250011920929,
"macro_recall": 0.5871789455413818
},
"rottentomatoes": {
"macro_f1": 0.5285159349441528,
"accuracy": 0.5834896564483643,
"macro_precision": 0.6564617156982422,
"macro_recall": 0.5834896564483643
},
"financialphrasebank": {
"macro_f1": 0.22743332386016846,
"accuracy": 0.22028985619544983,
"macro_precision": 0.3777104318141937,
"macro_recall": 0.4090906083583832
},
"emotiondair": {
"macro_f1": 0.2960258722305298,
"accuracy": 0.33899998664855957,
"macro_precision": 0.389321893453598,
"macro_recall": 0.3211808204650879
},
"empathetic": {
"macro_f1": 0.30447065830230713,
"accuracy": 0.3140000104904175,
"macro_precision": 0.40536338090896606,
"macro_recall": 0.3165235221385956
},
"banking77": {
"macro_f1": 0.4160025417804718,
"accuracy": 0.42500001192092896,
"macro_precision": 0.5339058637619019,
"macro_recall": 0.4476704001426697
},
"biasframes_intent": {
"macro_f1": 0.4069782495498657,
"accuracy": 0.43700000643730164,
"macro_precision": 0.44053563475608826,
"macro_recall": 0.45673811435699463
},
"massive": {
"macro_f1": 0.2843197286128998,
"accuracy": 0.31200000643730164,
"macro_precision": 0.395669162273407,
"macro_recall": 0.2905403673648834
},
"agnews": {
"macro_f1": 0.5731089115142822,
"accuracy": 0.5619999766349792,
"macro_precision": 0.7408709526062012,
"macro_recall": 0.5561092495918274
},
"yahootopics": {
"macro_f1": 0.4667598307132721,
"accuracy": 0.4519999921321869,
"macro_precision": 0.5803982615470886,
"macro_recall": 0.45940494537353516
},
"trueteacher": {
"macro_f1": 0.4678521752357483,
"accuracy": 0.503000020980835,
"macro_precision": 0.4999839663505554,
"macro_recall": 0.49998798966407776
},
"manifesto": {
"macro_f1": 0.14805404841899872,
"accuracy": 0.2980000078678131,
"macro_precision": 0.23859953880310059,
"macro_recall": 0.17197829484939575
},
"capsotu": {
"macro_f1": 0.4624970257282257,
"accuracy": 0.45399999618530273,
"macro_precision": 0.5515788793563843,
"macro_recall": 0.47562360763549805
},
"biasframes_offensive": {
"macro_f1": 0.5785272121429443,
"accuracy": 0.640999972820282,
"macro_precision": 0.6652180552482605,
"macro_recall": 0.6001869440078735
},
"biasframes_sex": {
"macro_f1": 0.2553952634334564,
"accuracy": 0.2840000092983246,
"macro_precision": 0.5019315481185913,
"macro_recall": 0.5066988468170166
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5594816207885742,
"accuracy": 0.6010000109672546,
"macro_precision": 0.6851626038551331,
"macro_recall": 0.6093939542770386
},
"wikitoxic_obscene": {
"macro_f1": 0.5347299575805664,
"accuracy": 0.5509999990463257,
"macro_precision": 0.6338613629341125,
"macro_recall": 0.5977449417114258
},
"wikitoxic_threat": {
"macro_f1": 0.2527029812335968,
"accuracy": 0.28200000524520874,
"macro_precision": 0.5235722661018372,
"macro_recall": 0.6027957201004028
},
"wikitoxic_insult": {
"macro_f1": 0.5313156843185425,
"accuracy": 0.5540000200271606,
"macro_precision": 0.7045025825500488,
"macro_recall": 0.6248852014541626
}
}
} |
1.0 | {
"name": "Qwen3-4B",
"model_type": "llm",
"params": "4B",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-4B"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.648575,
"accuracy": 0.696985,
"macro_precision": 0.710996,
"macro_recall": 0.692165
},
"by_task": {
"sentiment": {
"macro_f1": 0.88324,
"accuracy": 0.875612,
"macro_precision": 0.894227,
"macro_recall": 0.89537
},
"emotion": {
"macro_f1": 0.37276,
"accuracy": 0.4415,
"macro_precision": 0.484641,
"macro_recall": 0.373105
},
"intent": {
"macro_f1": 0.400667,
"accuracy": 0.457333,
"macro_precision": 0.545104,
"macro_recall": 0.464412
},
"topic": {
"macro_f1": 0.638335,
"accuracy": 0.711364,
"macro_precision": 0.697452,
"macro_recall": 0.701451
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9448078870773315,
"accuracy": 0.9449999928474426,
"macro_precision": 0.9475924372673035,
"macro_recall": 0.9441041946411133
},
"imdb": {
"macro_f1": 0.9235104322433472,
"accuracy": 0.9240000247955322,
"macro_precision": 0.9299787282943726,
"macro_recall": 0.9225436449050903
},
"appreviews": {
"macro_f1": 0.9166899919509888,
"accuracy": 0.9169999957084656,
"macro_precision": 0.922028660774231,
"macro_recall": 0.9165706634521484
},
"yelpreviews": {
"macro_f1": 0.9839959144592285,
"accuracy": 0.984000027179718,
"macro_precision": 0.9839394092559814,
"macro_recall": 0.9842493534088135
},
"rottentomatoes": {
"macro_f1": 0.8675339221954346,
"accuracy": 0.8677298426628113,
"macro_precision": 0.869918704032898,
"macro_recall": 0.8677297830581665
},
"financialphrasebank": {
"macro_f1": 0.6628990769386292,
"accuracy": 0.6159420013427734,
"macro_precision": 0.7119026184082031,
"macro_recall": 0.7370214462280273
},
"emotiondair": {
"macro_f1": 0.4271164834499359,
"accuracy": 0.5440000295639038,
"macro_precision": 0.5664623975753784,
"macro_recall": 0.40451374650001526
},
"empathetic": {
"macro_f1": 0.3184030055999756,
"accuracy": 0.33899998664855957,
"macro_precision": 0.4028189182281494,
"macro_recall": 0.34169602394104004
},
"banking77": {
"macro_f1": 0.37111204862594604,
"accuracy": 0.3959999978542328,
"macro_precision": 0.4828844964504242,
"macro_recall": 0.42354440689086914
},
"biasframes_intent": {
"macro_f1": 0.4490644335746765,
"accuracy": 0.5759999752044678,
"macro_precision": 0.6940479278564453,
"macro_recall": 0.5426543951034546
},
"massive": {
"macro_f1": 0.38182488083839417,
"accuracy": 0.4000000059604645,
"macro_precision": 0.4583800435066223,
"macro_recall": 0.4270361065864563
},
"agnews": {
"macro_f1": 0.822451114654541,
"accuracy": 0.824999988079071,
"macro_precision": 0.8497323393821716,
"macro_recall": 0.8233746290206909
},
"yahootopics": {
"macro_f1": 0.5222913026809692,
"accuracy": 0.5580000281333923,
"macro_precision": 0.6449465155601501,
"macro_recall": 0.5097914338111877
},
"trueteacher": {
"macro_f1": 0.34269624948501587,
"accuracy": 0.5090000033378601,
"macro_precision": 0.7537612915039062,
"macro_recall": 0.5030364394187927
},
"manifesto": {
"macro_f1": 0.16602693498134613,
"accuracy": 0.2939999997615814,
"macro_precision": 0.2201460599899292,
"macro_recall": 0.20944738388061523
},
"capsotu": {
"macro_f1": 0.5471009016036987,
"accuracy": 0.5950000286102295,
"macro_precision": 0.62078857421875,
"macro_recall": 0.5773991346359253
},
"biasframes_offensive": {
"macro_f1": 0.6946063041687012,
"accuracy": 0.7139999866485596,
"macro_precision": 0.7192214131355286,
"macro_recall": 0.6926696300506592
},
"biasframes_sex": {
"macro_f1": 0.6385217905044556,
"accuracy": 0.828000009059906,
"macro_precision": 0.6154297590255737,
"macro_recall": 0.8601654767990112
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.8698828816413879,
"accuracy": 0.8700000047683716,
"macro_precision": 0.8699856996536255,
"macro_recall": 0.8698079586029053
},
"wikitoxic_obscene": {
"macro_f1": 0.8922058343887329,
"accuracy": 0.8949999809265137,
"macro_precision": 0.8918255567550659,
"macro_recall": 0.8926034569740295
},
"wikitoxic_threat": {
"macro_f1": 0.6722221970558167,
"accuracy": 0.8820000290870667,
"macro_precision": 0.6296330094337463,
"macro_recall": 0.9057626724243164
},
"wikitoxic_insult": {
"macro_f1": 0.8536794781684875,
"accuracy": 0.8550000190734863,
"macro_precision": 0.8564978837966919,
"macro_recall": 0.8719052076339722
}
}
} |
1.0 | {
"name": "Qwen3-8B",
"model_type": "llm",
"params": "8.2B",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-8B"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.664912,
"accuracy": 0.709153,
"macro_precision": 0.699978,
"macro_recall": 0.703688
},
"by_task": {
"sentiment": {
"macro_f1": 0.89867,
"accuracy": 0.899727,
"macro_precision": 0.900424,
"macro_recall": 0.914296
},
"emotion": {
"macro_f1": 0.324137,
"accuracy": 0.398,
"macro_precision": 0.488157,
"macro_recall": 0.32666
},
"intent": {
"macro_f1": 0.481523,
"accuracy": 0.501667,
"macro_precision": 0.57502,
"macro_recall": 0.513194
},
"topic": {
"macro_f1": 0.649383,
"accuracy": 0.718364,
"macro_precision": 0.663237,
"macro_recall": 0.709315
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9448207020759583,
"accuracy": 0.9449999928474426,
"macro_precision": 0.9473200440406799,
"macro_recall": 0.944156289100647
},
"imdb": {
"macro_f1": 0.9378049373626709,
"accuracy": 0.9380000233650208,
"macro_precision": 0.9401372671127319,
"macro_recall": 0.9371775388717651
},
"appreviews": {
"macro_f1": 0.9175726771354675,
"accuracy": 0.9179999828338623,
"macro_precision": 0.9253227710723877,
"macro_recall": 0.9174827337265015
},
"yelpreviews": {
"macro_f1": 0.9829938411712646,
"accuracy": 0.9829999804496765,
"macro_precision": 0.9829134941101074,
"macro_recall": 0.9831706285476685
},
"rottentomatoes": {
"macro_f1": 0.8576008677482605,
"accuracy": 0.8592870831489563,
"macro_precision": 0.8771510124206543,
"macro_recall": 0.8592870235443115
},
"financialphrasebank": {
"macro_f1": 0.7512251138687134,
"accuracy": 0.7550724744796753,
"macro_precision": 0.72970050573349,
"macro_recall": 0.8444997072219849
},
"emotiondair": {
"macro_f1": 0.4043771028518677,
"accuracy": 0.5360000133514404,
"macro_precision": 0.5580412149429321,
"macro_recall": 0.38969686627388
},
"empathetic": {
"macro_f1": 0.2438962459564209,
"accuracy": 0.25999999046325684,
"macro_precision": 0.4182721972465515,
"macro_recall": 0.2636229991912842
},
"banking77": {
"macro_f1": 0.38651391863822937,
"accuracy": 0.39500001072883606,
"macro_precision": 0.5226321816444397,
"macro_recall": 0.4338746964931488
},
"biasframes_intent": {
"macro_f1": 0.672095775604248,
"accuracy": 0.6959999799728394,
"macro_precision": 0.7273061275482178,
"macro_recall": 0.6794042587280273
},
"massive": {
"macro_f1": 0.38596004247665405,
"accuracy": 0.414000004529953,
"macro_precision": 0.4751225709915161,
"macro_recall": 0.42630308866500854
},
"agnews": {
"macro_f1": 0.8517557382583618,
"accuracy": 0.8529999852180481,
"macro_precision": 0.8622773885726929,
"macro_recall": 0.8527622222900391
},
"yahootopics": {
"macro_f1": 0.5463981628417969,
"accuracy": 0.5770000219345093,
"macro_precision": 0.6372009515762329,
"macro_recall": 0.5259219408035278
},
"trueteacher": {
"macro_f1": 0.335989385843277,
"accuracy": 0.5059999823570251,
"macro_precision": 0.2529999911785126,
"macro_recall": 0.5
},
"manifesto": {
"macro_f1": 0.18233036994934082,
"accuracy": 0.289000004529953,
"macro_precision": 0.23162594437599182,
"macro_recall": 0.21952621638774872
},
"capsotu": {
"macro_f1": 0.5280041694641113,
"accuracy": 0.5690000057220459,
"macro_precision": 0.6109967827796936,
"macro_recall": 0.5517712831497192
},
"biasframes_offensive": {
"macro_f1": 0.7154461145401001,
"accuracy": 0.746999979019165,
"macro_precision": 0.794788122177124,
"macro_recall": 0.7152205109596252
},
"biasframes_sex": {
"macro_f1": 0.6741390228271484,
"accuracy": 0.8550000190734863,
"macro_precision": 0.6377258896827698,
"macro_recall": 0.8987663984298706
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.8597756624221802,
"accuracy": 0.8600000143051147,
"macro_precision": 0.8654845952987671,
"macro_recall": 0.8616224527359009
},
"wikitoxic_obscene": {
"macro_f1": 0.8883872628211975,
"accuracy": 0.890999972820282,
"macro_precision": 0.8871627449989319,
"macro_recall": 0.8898411989212036
},
"wikitoxic_threat": {
"macro_f1": 0.7205759882926941,
"accuracy": 0.9139999747276306,
"macro_precision": 0.6636102795600891,
"macro_recall": 0.9224990606307983
},
"wikitoxic_insult": {
"macro_f1": 0.8404061794281006,
"accuracy": 0.8410000205039978,
"macro_precision": 0.8517290353775024,
"macro_recall": 0.8645344972610474
}
}
} |
1.0 | {
"name": "gemma-3-1b-it",
"model_type": "llm",
"params": "1B",
"revision": "unknown",
"url": "https://huggingface.co/gemma-3-1b-it"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.359127,
"accuracy": 0.400456,
"macro_precision": 0.464795,
"macro_recall": 0.430821
},
"by_task": {
"sentiment": {
"macro_f1": 0.519361,
"accuracy": 0.558006,
"macro_precision": 0.642945,
"macro_recall": 0.588836
},
"emotion": {
"macro_f1": 0.143654,
"accuracy": 0.2005,
"macro_precision": 0.218937,
"macro_recall": 0.20488
},
"intent": {
"macro_f1": 0.235606,
"accuracy": 0.263333,
"macro_precision": 0.307808,
"macro_recall": 0.266785
},
"topic": {
"macro_f1": 0.344591,
"accuracy": 0.388273,
"macro_precision": 0.455137,
"macro_recall": 0.430449
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.5177582502365112,
"accuracy": 0.5929999947547913,
"macro_precision": 0.699536919593811,
"macro_recall": 0.5831242203712463
},
"imdb": {
"macro_f1": 0.5621848702430725,
"accuracy": 0.609000027179718,
"macro_precision": 0.666501522064209,
"macro_recall": 0.6009042263031006
},
"appreviews": {
"macro_f1": 0.6798030138015747,
"accuracy": 0.6909999847412109,
"macro_precision": 0.7188526391983032,
"macro_recall": 0.689548134803772
},
"yelpreviews": {
"macro_f1": 0.5339144468307495,
"accuracy": 0.6029999852180481,
"macro_precision": 0.7090381383895874,
"macro_recall": 0.5933911204338074
},
"rottentomatoes": {
"macro_f1": 0.5737664699554443,
"accuracy": 0.6013132929801941,
"macro_precision": 0.6366355419158936,
"macro_recall": 0.6013133525848389
},
"financialphrasebank": {
"macro_f1": 0.24874034523963928,
"accuracy": 0.2507246434688568,
"macro_precision": 0.4271034598350525,
"macro_recall": 0.46473199129104614
},
"emotiondair": {
"macro_f1": 0.130890890955925,
"accuracy": 0.1860000044107437,
"macro_precision": 0.19724853336811066,
"macro_recall": 0.2009340524673462
},
"empathetic": {
"macro_f1": 0.15641769766807556,
"accuracy": 0.2150000035762787,
"macro_precision": 0.24062468111515045,
"macro_recall": 0.20882539451122284
},
"banking77": {
"macro_f1": 0.10407334566116333,
"accuracy": 0.14399999380111694,
"macro_precision": 0.17591653764247894,
"macro_recall": 0.14734937250614166
},
"biasframes_intent": {
"macro_f1": 0.5240811705589294,
"accuracy": 0.5569999814033508,
"macro_precision": 0.5500313639640808,
"macro_recall": 0.5401318073272705
},
"massive": {
"macro_f1": 0.07866360247135162,
"accuracy": 0.08900000154972076,
"macro_precision": 0.1974775344133377,
"macro_recall": 0.11287318170070648
},
"agnews": {
"macro_f1": 0.3620033264160156,
"accuracy": 0.3930000066757202,
"macro_precision": 0.49714550375938416,
"macro_recall": 0.384908527135849
},
"yahootopics": {
"macro_f1": 0.17090842127799988,
"accuracy": 0.1940000057220459,
"macro_precision": 0.30175936222076416,
"macro_recall": 0.22860918939113617
},
"trueteacher": {
"macro_f1": 0.3920586407184601,
"accuracy": 0.46399998664855957,
"macro_precision": 0.4262353181838989,
"macro_recall": 0.4599382281303406
},
"manifesto": {
"macro_f1": 0.007554742507636547,
"accuracy": 0.04399999976158142,
"macro_precision": 0.019994866102933884,
"macro_recall": 0.027067728340625763
},
"capsotu": {
"macro_f1": 0.16142979264259338,
"accuracy": 0.19599999487400055,
"macro_precision": 0.291238009929657,
"macro_recall": 0.1541706770658493
},
"biasframes_offensive": {
"macro_f1": 0.480392187833786,
"accuracy": 0.5249999761581421,
"macro_precision": 0.4933592677116394,
"macro_recall": 0.49461936950683594
},
"biasframes_sex": {
"macro_f1": 0.4423656463623047,
"accuracy": 0.5789999961853027,
"macro_precision": 0.5325133204460144,
"macro_recall": 0.6471008062362671
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.515433132648468,
"accuracy": 0.5619999766349792,
"macro_precision": 0.6208167672157288,
"macro_recall": 0.5704455971717834
},
"wikitoxic_obscene": {
"macro_f1": 0.5521719455718994,
"accuracy": 0.5680000185966492,
"macro_precision": 0.6584980487823486,
"macro_recall": 0.6153833866119385
},
"wikitoxic_threat": {
"macro_f1": 0.20803073048591614,
"accuracy": 0.2240000069141388,
"macro_precision": 0.5171197652816772,
"macro_recall": 0.5616204142570496
},
"wikitoxic_insult": {
"macro_f1": 0.4981500804424286,
"accuracy": 0.5220000147819519,
"macro_precision": 0.6478319764137268,
"macro_recall": 0.5910699367523193
}
}
} |
1.0 | {
"name": "gemma-3-270m-it",
"model_type": "llm",
"params": "270M",
"revision": "unknown",
"url": "https://huggingface.co/gemma-3-270m-it"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.279441,
"accuracy": 0.314287,
"macro_precision": 0.341366,
"macro_recall": 0.340519
},
"by_task": {
"sentiment": {
"macro_f1": 0.42075,
"accuracy": 0.46572,
"macro_precision": 0.476907,
"macro_recall": 0.472474
},
"emotion": {
"macro_f1": 0.037215,
"accuracy": 0.0725,
"macro_precision": 0.026191,
"macro_recall": 0.088977
},
"intent": {
"macro_f1": 0.129629,
"accuracy": 0.143333,
"macro_precision": 0.136884,
"macro_recall": 0.148515
},
"topic": {
"macro_f1": 0.287262,
"accuracy": 0.322273,
"macro_precision": 0.380507,
"macro_recall": 0.366643
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.46282345056533813,
"accuracy": 0.5080000162124634,
"macro_precision": 0.5011513829231262,
"macro_recall": 0.5007985234260559
},
"imdb": {
"macro_f1": 0.4741121828556061,
"accuracy": 0.5239999890327454,
"macro_precision": 0.5250498056411743,
"macro_recall": 0.5163410305976868
},
"appreviews": {
"macro_f1": 0.48126956820487976,
"accuracy": 0.5149999856948853,
"macro_precision": 0.5173690319061279,
"macro_recall": 0.5129928588867188
},
"yelpreviews": {
"macro_f1": 0.42642542719841003,
"accuracy": 0.4740000069141388,
"macro_precision": 0.45247310400009155,
"macro_recall": 0.4668275713920593
},
"rottentomatoes": {
"macro_f1": 0.4736323356628418,
"accuracy": 0.5037523508071899,
"macro_precision": 0.5048661231994629,
"macro_recall": 0.5037523508071899
},
"financialphrasebank": {
"macro_f1": 0.2062343955039978,
"accuracy": 0.269565224647522,
"macro_precision": 0.3605327308177948,
"macro_recall": 0.33413049578666687
},
"emotiondair": {
"macro_f1": 0.06215536221861839,
"accuracy": 0.10700000077486038,
"macro_precision": 0.04352770000696182,
"macro_recall": 0.14155223965644836
},
"empathetic": {
"macro_f1": 0.012273683212697506,
"accuracy": 0.03799999877810478,
"macro_precision": 0.008854017592966557,
"macro_recall": 0.0364009328186512
},
"banking77": {
"macro_f1": 0.002360794460400939,
"accuracy": 0.010999999940395355,
"macro_precision": 0.002926028799265623,
"macro_recall": 0.01021870318800211
},
"biasframes_intent": {
"macro_f1": 0.3849015235900879,
"accuracy": 0.4099999964237213,
"macro_precision": 0.40675675868988037,
"macro_recall": 0.42782309651374817
},
"massive": {
"macro_f1": 0.0016255928203463554,
"accuracy": 0.008999999612569809,
"macro_precision": 0.0009695125627331436,
"macro_recall": 0.007502308581024408
},
"agnews": {
"macro_f1": 0.17617017030715942,
"accuracy": 0.23800000548362732,
"macro_precision": 0.246751606464386,
"macro_recall": 0.23454733192920685
},
"yahootopics": {
"macro_f1": 0.03481292724609375,
"accuracy": 0.07500000298023224,
"macro_precision": 0.12142021954059601,
"macro_recall": 0.07161495834589005
},
"trueteacher": {
"macro_f1": 0.4440944492816925,
"accuracy": 0.5049999952316284,
"macro_precision": 0.5019047260284424,
"macro_recall": 0.5011001825332642
},
"manifesto": {
"macro_f1": 0.00011458253720775247,
"accuracy": 0.003000000026077032,
"macro_precision": 0.00005746576061937958,
"macro_recall": 0.01886792480945587
},
"capsotu": {
"macro_f1": 0.0343693271279335,
"accuracy": 0.08900000154972076,
"macro_precision": 0.04259488359093666,
"macro_recall": 0.04955240711569786
},
"biasframes_offensive": {
"macro_f1": 0.5426101684570312,
"accuracy": 0.5860000252723694,
"macro_precision": 0.5699592232704163,
"macro_recall": 0.5546879768371582
},
"biasframes_sex": {
"macro_f1": 0.2550809681415558,
"accuracy": 0.28700000047683716,
"macro_precision": 0.4932486414909363,
"macro_recall": 0.47593164443969727
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5071262121200562,
"accuracy": 0.5419999957084656,
"macro_precision": 0.5715240240097046,
"macro_recall": 0.5492873191833496
},
"wikitoxic_obscene": {
"macro_f1": 0.4925040304660797,
"accuracy": 0.5120000243186951,
"macro_precision": 0.5833622813224792,
"macro_recall": 0.5591838359832764
},
"wikitoxic_threat": {
"macro_f1": 0.20182383060455322,
"accuracy": 0.22100000083446503,
"macro_precision": 0.492961049079895,
"macro_recall": 0.47332635521888733
},
"wikitoxic_insult": {
"macro_f1": 0.4711727499961853,
"accuracy": 0.4869999885559082,
"macro_precision": 0.5617921352386475,
"macro_recall": 0.5449715256690979
}
}
} |
1.0 | {
"name": "bart-large-mnli",
"model_type": "nli",
"params": "407M",
"revision": "unknown",
"url": "https://huggingface.co/bart-large-mnli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.507865,
"accuracy": 0.532448,
"macro_precision": 0.656902,
"macro_recall": 0.583081
},
"by_task": {
"sentiment": {
"macro_f1": 0.839767,
"accuracy": 0.83231,
"macro_precision": 0.871047,
"macro_recall": 0.874735
},
"emotion": {
"macro_f1": 0.411883,
"accuracy": 0.447,
"macro_precision": 0.480611,
"macro_recall": 0.431878
},
"intent": {
"macro_f1": 0.431855,
"accuracy": 0.449667,
"macro_precision": 0.560196,
"macro_recall": 0.447435
},
"topic": {
"macro_f1": 0.365008,
"accuracy": 0.407,
"macro_precision": 0.598524,
"macro_recall": 0.488483
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9329510927200317,
"accuracy": 0.9330000281333923,
"macro_precision": 0.9329754114151001,
"macro_recall": 0.9329286813735962
},
"imdb": {
"macro_f1": 0.9289840459823608,
"accuracy": 0.9290000200271606,
"macro_precision": 0.9289548397064209,
"macro_recall": 0.9292382001876831
},
"appreviews": {
"macro_f1": 0.9209651350975037,
"accuracy": 0.9210000038146973,
"macro_precision": 0.9213510751724243,
"macro_recall": 0.9208909273147583
},
"yelpreviews": {
"macro_f1": 0.9589999914169312,
"accuracy": 0.9589999914169312,
"macro_precision": 0.9597244262695312,
"macro_recall": 0.9596747159957886
},
"rottentomatoes": {
"macro_f1": 0.8316553235054016,
"accuracy": 0.8330206274986267,
"macro_precision": 0.8441861867904663,
"macro_recall": 0.8330206274986267
},
"financialphrasebank": {
"macro_f1": 0.46504926681518555,
"accuracy": 0.41884058713912964,
"macro_precision": 0.6390920877456665,
"macro_recall": 0.6726571917533875
},
"emotiondair": {
"macro_f1": 0.43902286887168884,
"accuracy": 0.5049999952316284,
"macro_precision": 0.4808715879917145,
"macro_recall": 0.4806637763977051
},
"empathetic": {
"macro_f1": 0.384743332862854,
"accuracy": 0.3889999985694885,
"macro_precision": 0.4803498387336731,
"macro_recall": 0.38309159874916077
},
"banking77": {
"macro_f1": 0.2827707529067993,
"accuracy": 0.28700000047683716,
"macro_precision": 0.47028210759162903,
"macro_recall": 0.3000609278678894
},
"biasframes_intent": {
"macro_f1": 0.6016557216644287,
"accuracy": 0.6320000290870667,
"macro_precision": 0.6472827196121216,
"macro_recall": 0.6145737767219543
},
"massive": {
"macro_f1": 0.411138653755188,
"accuracy": 0.4300000071525574,
"macro_precision": 0.5630237460136414,
"macro_recall": 0.42766883969306946
},
"agnews": {
"macro_f1": 0.7090643048286438,
"accuracy": 0.7269999980926514,
"macro_precision": 0.7674412727355957,
"macro_recall": 0.7252845168113708
},
"yahootopics": {
"macro_f1": 0.27278462052345276,
"accuracy": 0.30300000309944153,
"macro_precision": 0.5333313345909119,
"macro_recall": 0.27680033445358276
},
"trueteacher": {
"macro_f1": 0.5149956345558167,
"accuracy": 0.5149999856948853,
"macro_precision": 0.5150365233421326,
"macro_recall": 0.5150381326675415
},
"manifesto": {
"macro_f1": 0.08716775476932526,
"accuracy": 0.09000000357627869,
"macro_precision": 0.217774897813797,
"macro_recall": 0.09726245701313019
},
"capsotu": {
"macro_f1": 0.33400794863700867,
"accuracy": 0.39399999380111694,
"macro_precision": 0.5878989696502686,
"macro_recall": 0.36520224809646606
},
"biasframes_offensive": {
"macro_f1": 0.3668980002403259,
"accuracy": 0.5690000057220459,
"macro_precision": 0.7840681076049805,
"macro_recall": 0.5023094415664673
},
"biasframes_sex": {
"macro_f1": 0.07344245910644531,
"accuracy": 0.07500000298023224,
"macro_precision": 0.5295014977455139,
"macro_recall": 0.5090233683586121
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5524932146072388,
"accuracy": 0.6069999933242798,
"macro_precision": 0.7446534633636475,
"macro_recall": 0.6164907217025757
},
"wikitoxic_obscene": {
"macro_f1": 0.689961314201355,
"accuracy": 0.6930000185966492,
"macro_precision": 0.7562661170959473,
"macro_recall": 0.7288388609886169
},
"wikitoxic_threat": {
"macro_f1": 0.0839853435754776,
"accuracy": 0.08399999886751175,
"macro_precision": 0.5229166746139526,
"macro_recall": 0.5209205150604248
},
"wikitoxic_insult": {
"macro_f1": 0.3302880525588989,
"accuracy": 0.41999998688697815,
"macro_precision": 0.6248708963394165,
"macro_recall": 0.5161438584327698
}
}
} |
1.0 | {
"name": "bert-base-uncased-nli",
"model_type": "nli",
"params": "110M",
"revision": "unknown",
"url": "https://huggingface.co/bert-base-uncased-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.48755,
"accuracy": 0.50482,
"macro_precision": 0.560883,
"macro_recall": 0.541221
},
"by_task": {
"sentiment": {
"macro_f1": 0.760962,
"accuracy": 0.756506,
"macro_precision": 0.794178,
"macro_recall": 0.79018
},
"emotion": {
"macro_f1": 0.255882,
"accuracy": 0.27,
"macro_precision": 0.403987,
"macro_recall": 0.283998
},
"intent": {
"macro_f1": 0.298618,
"accuracy": 0.289333,
"macro_precision": 0.38308,
"macro_recall": 0.296697
},
"topic": {
"macro_f1": 0.432065,
"accuracy": 0.469,
"macro_precision": 0.510649,
"macro_recall": 0.518881
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.862488329410553,
"accuracy": 0.8629999756813049,
"macro_precision": 0.8651169538497925,
"macro_recall": 0.8619967103004456
},
"imdb": {
"macro_f1": 0.7927390336990356,
"accuracy": 0.7940000295639038,
"macro_precision": 0.7973349690437317,
"macro_recall": 0.7925077676773071
},
"appreviews": {
"macro_f1": 0.8378125429153442,
"accuracy": 0.8379999995231628,
"macro_precision": 0.8389803171157837,
"macro_recall": 0.8377816081047058
},
"yelpreviews": {
"macro_f1": 0.8869591951370239,
"accuracy": 0.8870000243186951,
"macro_precision": 0.8868997097015381,
"macro_recall": 0.8871057033538818
},
"rottentomatoes": {
"macro_f1": 0.754468560218811,
"accuracy": 0.7570356726646423,
"macro_precision": 0.7682543992996216,
"macro_recall": 0.7570356726646423
},
"financialphrasebank": {
"macro_f1": 0.4313048720359802,
"accuracy": 0.4000000059604645,
"macro_precision": 0.6084808111190796,
"macro_recall": 0.6046509742736816
},
"emotiondair": {
"macro_f1": 0.36164164543151855,
"accuracy": 0.3880000114440918,
"macro_precision": 0.44819334149360657,
"macro_recall": 0.4212839901447296
},
"empathetic": {
"macro_f1": 0.15012234449386597,
"accuracy": 0.15199999511241913,
"macro_precision": 0.3597811758518219,
"macro_recall": 0.14671137928962708
},
"banking77": {
"macro_f1": 0.011304003186523914,
"accuracy": 0.019999999552965164,
"macro_precision": 0.03405291587114334,
"macro_recall": 0.014650114811956882
},
"biasframes_intent": {
"macro_f1": 0.5621905326843262,
"accuracy": 0.5630000233650208,
"macro_precision": 0.5710198879241943,
"macro_recall": 0.569557785987854
},
"massive": {
"macro_f1": 0.32235872745513916,
"accuracy": 0.2849999964237213,
"macro_precision": 0.5441672801971436,
"macro_recall": 0.30588337779045105
},
"agnews": {
"macro_f1": 0.6833652257919312,
"accuracy": 0.6850000023841858,
"macro_precision": 0.7304670810699463,
"macro_recall": 0.6929008364677429
},
"yahootopics": {
"macro_f1": 0.33737125992774963,
"accuracy": 0.382999986410141,
"macro_precision": 0.45285505056381226,
"macro_recall": 0.34131163358688354
},
"trueteacher": {
"macro_f1": 0.3365929126739502,
"accuracy": 0.492000013589859,
"macro_precision": 0.4464646577835083,
"macro_recall": 0.49787968397140503
},
"manifesto": {
"macro_f1": 0.001060847076587379,
"accuracy": 0.017000000923871994,
"macro_precision": 0.009742955677211285,
"macro_recall": 0.016121715307235718
},
"capsotu": {
"macro_f1": 0.011852134019136429,
"accuracy": 0.019999999552965164,
"macro_precision": 0.06236717849969864,
"macro_recall": 0.05334913730621338
},
"biasframes_offensive": {
"macro_f1": 0.5668436288833618,
"accuracy": 0.5669999718666077,
"macro_precision": 0.5809178352355957,
"macro_recall": 0.5799597501754761
},
"biasframes_sex": {
"macro_f1": 0.4279535114765167,
"accuracy": 0.5260000228881836,
"macro_precision": 0.5509116649627686,
"macro_recall": 0.7322278022766113
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.611831545829773,
"accuracy": 0.6470000147819519,
"macro_precision": 0.7087802886962891,
"macro_recall": 0.6396063566207886
},
"wikitoxic_obscene": {
"macro_f1": 0.7538200616836548,
"accuracy": 0.7680000066757202,
"macro_precision": 0.7689436674118042,
"macro_recall": 0.7487791776657104
},
"wikitoxic_threat": {
"macro_f1": 0.26134955883026123,
"accuracy": 0.2930000126361847,
"macro_precision": 0.5267128944396973,
"macro_recall": 0.619389533996582
},
"wikitoxic_insult": {
"macro_f1": 0.7606723308563232,
"accuracy": 0.7609999775886536,
"macro_precision": 0.7789746522903442,
"macro_recall": 0.7861692309379578
}
}
} |
1.0 | {
"name": "bert-large-uncased-nli-triplet",
"model_type": "nli",
"params": "335M",
"revision": "unknown",
"url": "https://huggingface.co/bert-large-uncased-nli-triplet"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.524601,
"accuracy": 0.553299,
"macro_precision": 0.580262,
"macro_recall": 0.580901
},
"by_task": {
"sentiment": {
"macro_f1": 0.776186,
"accuracy": 0.781098,
"macro_precision": 0.800591,
"macro_recall": 0.801062
},
"emotion": {
"macro_f1": 0.239191,
"accuracy": 0.252,
"macro_precision": 0.370929,
"macro_recall": 0.271308
},
"intent": {
"macro_f1": 0.345063,
"accuracy": 0.348333,
"macro_precision": 0.38974,
"macro_recall": 0.36855
},
"topic": {
"macro_f1": 0.48823,
"accuracy": 0.539727,
"macro_precision": 0.550103,
"macro_recall": 0.575017
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.8373348712921143,
"accuracy": 0.8399999737739563,
"macro_precision": 0.8558336496353149,
"macro_recall": 0.8372379541397095
},
"imdb": {
"macro_f1": 0.7798857092857361,
"accuracy": 0.7879999876022339,
"macro_precision": 0.824999213218689,
"macro_recall": 0.7835376262664795
},
"appreviews": {
"macro_f1": 0.8499616384506226,
"accuracy": 0.8500000238418579,
"macro_precision": 0.8501056432723999,
"macro_recall": 0.8499263525009155
},
"yelpreviews": {
"macro_f1": 0.8968430757522583,
"accuracy": 0.8970000147819519,
"macro_precision": 0.8973985910415649,
"macro_recall": 0.8965920805931091
},
"rottentomatoes": {
"macro_f1": 0.7154526710510254,
"accuracy": 0.7260788083076477,
"macro_precision": 0.7657798528671265,
"macro_recall": 0.7260788083076477
},
"financialphrasebank": {
"macro_f1": 0.5776402950286865,
"accuracy": 0.5855072736740112,
"macro_precision": 0.6094277501106262,
"macro_recall": 0.71299809217453
},
"emotiondair": {
"macro_f1": 0.2828684151172638,
"accuracy": 0.296999990940094,
"macro_precision": 0.39974313974380493,
"macro_recall": 0.3465455174446106
},
"empathetic": {
"macro_f1": 0.1955133080482483,
"accuracy": 0.2070000022649765,
"macro_precision": 0.34211501479148865,
"macro_recall": 0.19607101380825043
},
"banking77": {
"macro_f1": 0.062161438167095184,
"accuracy": 0.0689999982714653,
"macro_precision": 0.10687083750963211,
"macro_recall": 0.06681493669748306
},
"biasframes_intent": {
"macro_f1": 0.6038716435432434,
"accuracy": 0.6039999723434448,
"macro_precision": 0.6096311807632446,
"macro_recall": 0.6088849306106567
},
"massive": {
"macro_f1": 0.36915647983551025,
"accuracy": 0.3720000088214874,
"macro_precision": 0.4527166485786438,
"macro_recall": 0.4299509525299072
},
"agnews": {
"macro_f1": 0.7285709381103516,
"accuracy": 0.7319999933242798,
"macro_precision": 0.7486485242843628,
"macro_recall": 0.7356671690940857
},
"yahootopics": {
"macro_f1": 0.3332909643650055,
"accuracy": 0.4169999957084656,
"macro_precision": 0.4121311902999878,
"macro_recall": 0.37010493874549866
},
"trueteacher": {
"macro_f1": 0.36477410793304443,
"accuracy": 0.49000000953674316,
"macro_precision": 0.47578948736190796,
"macro_recall": 0.4953993558883667
},
"manifesto": {
"macro_f1": 0.018068579956889153,
"accuracy": 0.024000000208616257,
"macro_precision": 0.06057813763618469,
"macro_recall": 0.04148751124739647
},
"capsotu": {
"macro_f1": 0.13261830806732178,
"accuracy": 0.24300000071525574,
"macro_precision": 0.17876844108104706,
"macro_recall": 0.19008205831050873
},
"biasframes_offensive": {
"macro_f1": 0.6369782090187073,
"accuracy": 0.6690000295639038,
"macro_precision": 0.6752663850784302,
"macro_recall": 0.6407065391540527
},
"biasframes_sex": {
"macro_f1": 0.4285886883735657,
"accuracy": 0.5270000100135803,
"macro_precision": 0.5510167479515076,
"macro_recall": 0.732758641242981
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7741836905479431,
"accuracy": 0.777999997138977,
"macro_precision": 0.7908857464790344,
"macro_recall": 0.7751439809799194
},
"wikitoxic_obscene": {
"macro_f1": 0.8325483202934265,
"accuracy": 0.8349999785423279,
"macro_precision": 0.8306154608726501,
"macro_recall": 0.8376864194869995
},
"wikitoxic_threat": {
"macro_f1": 0.39473915100097656,
"accuracy": 0.4950000047683716,
"macro_precision": 0.5400728583335876,
"macro_recall": 0.7358786463737488
},
"wikitoxic_insult": {
"macro_f1": 0.7261716723442078,
"accuracy": 0.7269999980926514,
"macro_precision": 0.7873606085777283,
"macro_recall": 0.7702758312225342
}
}
} |
1.0 | {
"name": "bert-large-uncased-nli",
"model_type": "nli",
"params": "335M",
"revision": "unknown",
"url": "https://huggingface.co/bert-large-uncased-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.533903,
"accuracy": 0.572794,
"macro_precision": 0.599148,
"macro_recall": 0.576039
},
"by_task": {
"sentiment": {
"macro_f1": 0.790287,
"accuracy": 0.794577,
"macro_precision": 0.804526,
"macro_recall": 0.811546
},
"emotion": {
"macro_f1": 0.273093,
"accuracy": 0.337,
"macro_precision": 0.393333,
"macro_recall": 0.28053
},
"intent": {
"macro_f1": 0.353252,
"accuracy": 0.345667,
"macro_precision": 0.435347,
"macro_recall": 0.349971
},
"topic": {
"macro_f1": 0.490746,
"accuracy": 0.556636,
"macro_precision": 0.569217,
"macro_recall": 0.562964
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.83831787109375,
"accuracy": 0.8399999737739563,
"macro_precision": 0.8487353324890137,
"macro_recall": 0.8379144668579102
},
"imdb": {
"macro_f1": 0.8048792481422424,
"accuracy": 0.8100000023841858,
"macro_precision": 0.8359606266021729,
"macro_recall": 0.806333065032959
},
"appreviews": {
"macro_f1": 0.8546218872070312,
"accuracy": 0.8550000190734863,
"macro_precision": 0.8577853441238403,
"macro_recall": 0.8546466827392578
},
"yelpreviews": {
"macro_f1": 0.8967098593711853,
"accuracy": 0.8970000147819519,
"macro_precision": 0.8985104560852051,
"macro_recall": 0.8962278366088867
},
"rottentomatoes": {
"macro_f1": 0.7067925333976746,
"accuracy": 0.7176360487937927,
"macro_precision": 0.7554200887680054,
"macro_recall": 0.717635989189148
},
"financialphrasebank": {
"macro_f1": 0.6403998136520386,
"accuracy": 0.647826075553894,
"macro_precision": 0.630742609500885,
"macro_recall": 0.7565159797668457
},
"emotiondair": {
"macro_f1": 0.4244973361492157,
"accuracy": 0.5479999780654907,
"macro_precision": 0.45579445362091064,
"macro_recall": 0.4311284124851227
},
"empathetic": {
"macro_f1": 0.12168917059898376,
"accuracy": 0.12600000202655792,
"macro_precision": 0.33087220788002014,
"macro_recall": 0.12993068993091583
},
"banking77": {
"macro_f1": 0.07846608012914658,
"accuracy": 0.08100000023841858,
"macro_precision": 0.166195347905159,
"macro_recall": 0.08436316251754761
},
"biasframes_intent": {
"macro_f1": 0.6058075428009033,
"accuracy": 0.6069999933242798,
"macro_precision": 0.6058303117752075,
"macro_recall": 0.6063221096992493
},
"massive": {
"macro_f1": 0.375482976436615,
"accuracy": 0.3490000069141388,
"macro_precision": 0.5340158343315125,
"macro_recall": 0.3592279553413391
},
"agnews": {
"macro_f1": 0.7425510883331299,
"accuracy": 0.7480000257492065,
"macro_precision": 0.7552778124809265,
"macro_recall": 0.748619794845581
},
"yahootopics": {
"macro_f1": 0.21358641982078552,
"accuracy": 0.2549999952316284,
"macro_precision": 0.4941094219684601,
"macro_recall": 0.22243475914001465
},
"trueteacher": {
"macro_f1": 0.39294764399528503,
"accuracy": 0.49399998784065247,
"macro_precision": 0.4967177212238312,
"macro_recall": 0.49896785616874695
},
"manifesto": {
"macro_f1": 0.01616549678146839,
"accuracy": 0.01600000075995922,
"macro_precision": 0.07381048053503036,
"macro_recall": 0.04045149311423302
},
"capsotu": {
"macro_f1": 0.1265260875225067,
"accuracy": 0.21799999475479126,
"macro_precision": 0.32240763306617737,
"macro_recall": 0.17148847877979279
},
"biasframes_offensive": {
"macro_f1": 0.5404279828071594,
"accuracy": 0.5989999771118164,
"macro_precision": 0.5906268954277039,
"macro_recall": 0.5612396001815796
},
"biasframes_sex": {
"macro_f1": 0.5755754113197327,
"accuracy": 0.7559999823570251,
"macro_precision": 0.585429847240448,
"macro_recall": 0.8219488859176636
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6412836313247681,
"accuracy": 0.6740000247955322,
"macro_precision": 0.7500104904174805,
"macro_recall": 0.6665986180305481
},
"wikitoxic_obscene": {
"macro_f1": 0.7814091444015503,
"accuracy": 0.7940000295639038,
"macro_precision": 0.7978278994560242,
"macro_recall": 0.7754977941513062
},
"wikitoxic_threat": {
"macro_f1": 0.5536200404167175,
"accuracy": 0.7540000081062317,
"macro_precision": 0.5720260143280029,
"macro_recall": 0.8496576547622681
},
"wikitoxic_insult": {
"macro_f1": 0.814115047454834,
"accuracy": 0.8149999976158142,
"macro_precision": 0.8231396079063416,
"macro_recall": 0.8357025980949402
}
}
} |
1.0 | {
"name": "deberta-v3-base-nli",
"model_type": "nli",
"params": "184M",
"revision": "unknown",
"url": "https://huggingface.co/deberta-v3-base-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.550391,
"accuracy": 0.581439,
"macro_precision": 0.620075,
"macro_recall": 0.608686
},
"by_task": {
"sentiment": {
"macro_f1": 0.858712,
"accuracy": 0.857776,
"macro_precision": 0.859484,
"macro_recall": 0.879552
},
"emotion": {
"macro_f1": 0.33363,
"accuracy": 0.3485,
"macro_precision": 0.460301,
"macro_recall": 0.366114
},
"intent": {
"macro_f1": 0.309029,
"accuracy": 0.323333,
"macro_precision": 0.434245,
"macro_recall": 0.338941
},
"topic": {
"macro_f1": 0.487453,
"accuracy": 0.543455,
"macro_precision": 0.569219,
"macro_recall": 0.578612
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.89799964427948,
"accuracy": 0.8980000019073486,
"macro_precision": 0.8987488746643066,
"macro_recall": 0.8986594676971436
},
"imdb": {
"macro_f1": 0.9139721393585205,
"accuracy": 0.9139999747276306,
"macro_precision": 0.9139114022254944,
"macro_recall": 0.9141499996185303
},
"appreviews": {
"macro_f1": 0.9099708199501038,
"accuracy": 0.9100000262260437,
"macro_precision": 0.9109716415405273,
"macro_recall": 0.9102022647857666
},
"yelpreviews": {
"macro_f1": 0.9299719333648682,
"accuracy": 0.9300000071525574,
"macro_precision": 0.9327430725097656,
"macro_recall": 0.9311494827270508
},
"rottentomatoes": {
"macro_f1": 0.8234999775886536,
"accuracy": 0.8236397504806519,
"macro_precision": 0.8246683478355408,
"macro_recall": 0.8236397504806519
},
"financialphrasebank": {
"macro_f1": 0.67685467004776,
"accuracy": 0.6710144877433777,
"macro_precision": 0.6758581399917603,
"macro_recall": 0.7995125651359558
},
"emotiondair": {
"macro_f1": 0.3779505789279938,
"accuracy": 0.3790000081062317,
"macro_precision": 0.4566940665245056,
"macro_recall": 0.4229639172554016
},
"empathetic": {
"macro_f1": 0.28930890560150146,
"accuracy": 0.3179999887943268,
"macro_precision": 0.4639076590538025,
"macro_recall": 0.3092634081840515
},
"banking77": {
"macro_f1": 0.12482473254203796,
"accuracy": 0.12700000405311584,
"macro_precision": 0.23179928958415985,
"macro_recall": 0.13334208726882935
},
"biasframes_intent": {
"macro_f1": 0.4608410596847534,
"accuracy": 0.5139999985694885,
"macro_precision": 0.5807955861091614,
"macro_recall": 0.5409887433052063
},
"massive": {
"macro_f1": 0.3414219319820404,
"accuracy": 0.32899999618530273,
"macro_precision": 0.49014008045196533,
"macro_recall": 0.34249347448349
},
"agnews": {
"macro_f1": 0.7646851539611816,
"accuracy": 0.7639999985694885,
"macro_precision": 0.7633499503135681,
"macro_recall": 0.7681264877319336
},
"yahootopics": {
"macro_f1": 0.45720019936561584,
"accuracy": 0.5289999842643738,
"macro_precision": 0.5088838934898376,
"macro_recall": 0.4725315272808075
},
"trueteacher": {
"macro_f1": 0.3327486515045166,
"accuracy": 0.4909999966621399,
"macro_precision": 0.3890807032585144,
"macro_recall": 0.4969155490398407
},
"manifesto": {
"macro_f1": 0.03519533574581146,
"accuracy": 0.05299999937415123,
"macro_precision": 0.13150320947170258,
"macro_recall": 0.082496777176857
},
"capsotu": {
"macro_f1": 0.16506196558475494,
"accuracy": 0.23399999737739563,
"macro_precision": 0.37529000639915466,
"macro_recall": 0.19229184091091156
},
"biasframes_offensive": {
"macro_f1": 0.5039682388305664,
"accuracy": 0.6000000238418579,
"macro_precision": 0.6127842664718628,
"macro_recall": 0.5509325265884399
},
"biasframes_sex": {
"macro_f1": 0.4438576102256775,
"accuracy": 0.5529999732971191,
"macro_precision": 0.5521162152290344,
"macro_recall": 0.738469123840332
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7829363942146301,
"accuracy": 0.7839999794960022,
"macro_precision": 0.7942668199539185,
"macro_recall": 0.7863515615463257
},
"wikitoxic_obscene": {
"macro_f1": 0.8274722099304199,
"accuracy": 0.8320000171661377,
"macro_precision": 0.8272578120231628,
"macro_recall": 0.8276936411857605
},
"wikitoxic_threat": {
"macro_f1": 0.37141063809394836,
"accuracy": 0.4569999873638153,
"macro_precision": 0.5374786853790283,
"macro_recall": 0.7160041928291321
},
"wikitoxic_insult": {
"macro_f1": 0.6774438619613647,
"accuracy": 0.6809999942779541,
"macro_precision": 0.7694025039672852,
"macro_recall": 0.7329212427139282
}
}
} |
1.0 | {
"name": "deberta-v3-large-nli-triplet",
"model_type": "nli",
"params": "434M",
"revision": "unknown",
"url": "https://huggingface.co/deberta-v3-large-nli-triplet"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.595813,
"accuracy": 0.619343,
"macro_precision": 0.666545,
"macro_recall": 0.647509
},
"by_task": {
"sentiment": {
"macro_f1": 0.898852,
"accuracy": 0.903925,
"macro_precision": 0.898032,
"macro_recall": 0.90085
},
"emotion": {
"macro_f1": 0.418814,
"accuracy": 0.431,
"macro_precision": 0.520985,
"macro_recall": 0.446663
},
"intent": {
"macro_f1": 0.446877,
"accuracy": 0.455333,
"macro_precision": 0.531133,
"macro_recall": 0.469582
},
"topic": {
"macro_f1": 0.503319,
"accuracy": 0.543091,
"macro_precision": 0.603675,
"macro_recall": 0.594366
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9288427829742432,
"accuracy": 0.9290000200271606,
"macro_precision": 0.9301048517227173,
"macro_recall": 0.9284056425094604
},
"imdb": {
"macro_f1": 0.9268383979797363,
"accuracy": 0.9269999861717224,
"macro_precision": 0.9280955195426941,
"macro_recall": 0.9264042377471924
},
"appreviews": {
"macro_f1": 0.929977297782898,
"accuracy": 0.9300000071525574,
"macro_precision": 0.9302252531051636,
"macro_recall": 0.9299155473709106
},
"yelpreviews": {
"macro_f1": 0.980995774269104,
"accuracy": 0.9810000061988831,
"macro_precision": 0.9809557199478149,
"macro_recall": 0.9812732934951782
},
"rottentomatoes": {
"macro_f1": 0.8374935388565063,
"accuracy": 0.8377110958099365,
"macro_precision": 0.8395289182662964,
"macro_recall": 0.8377110958099365
},
"financialphrasebank": {
"macro_f1": 0.7889612913131714,
"accuracy": 0.8188405632972717,
"macro_precision": 0.7792794108390808,
"macro_recall": 0.8013922572135925
},
"emotiondair": {
"macro_f1": 0.4229159951210022,
"accuracy": 0.4390000104904175,
"macro_precision": 0.49999648332595825,
"macro_recall": 0.47307032346725464
},
"empathetic": {
"macro_f1": 0.41471126675605774,
"accuracy": 0.4230000078678131,
"macro_precision": 0.5419734716415405,
"macro_recall": 0.4202551543712616
},
"banking77": {
"macro_f1": 0.23813845217227936,
"accuracy": 0.24400000274181366,
"macro_precision": 0.34609121084213257,
"macro_recall": 0.2633965313434601
},
"biasframes_intent": {
"macro_f1": 0.6989635825157166,
"accuracy": 0.6990000009536743,
"macro_precision": 0.7016400098800659,
"macro_recall": 0.7022200226783752
},
"massive": {
"macro_f1": 0.4035292863845825,
"accuracy": 0.4230000078678131,
"macro_precision": 0.5456680059432983,
"macro_recall": 0.44312942028045654
},
"agnews": {
"macro_f1": 0.8319669365882874,
"accuracy": 0.8320000171661377,
"macro_precision": 0.8317348957061768,
"macro_recall": 0.8341231346130371
},
"yahootopics": {
"macro_f1": 0.2758510410785675,
"accuracy": 0.37299999594688416,
"macro_precision": 0.3875195384025574,
"macro_recall": 0.3296804428100586
},
"trueteacher": {
"macro_f1": 0.4128504991531372,
"accuracy": 0.5130000114440918,
"macro_precision": 0.5605462193489075,
"macro_recall": 0.51803058385849
},
"manifesto": {
"macro_f1": 0.059760164469480515,
"accuracy": 0.09300000220537186,
"macro_precision": 0.18072746694087982,
"macro_recall": 0.09268350154161453
},
"capsotu": {
"macro_f1": 0.24564850330352783,
"accuracy": 0.27300000190734863,
"macro_precision": 0.47797539830207825,
"macro_recall": 0.2954987585544586
},
"biasframes_offensive": {
"macro_f1": 0.6438407301902771,
"accuracy": 0.6669999957084656,
"macro_precision": 0.6651572585105896,
"macro_recall": 0.6444008350372314
},
"biasframes_sex": {
"macro_f1": 0.2731889486312866,
"accuracy": 0.296999990940094,
"macro_precision": 0.5381077527999878,
"macro_recall": 0.6268577575683594
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.8225740194320679,
"accuracy": 0.8230000138282776,
"macro_precision": 0.8296811580657959,
"macro_recall": 0.8248316049575806
},
"wikitoxic_obscene": {
"macro_f1": 0.8422045707702637,
"accuracy": 0.8429999947547913,
"macro_precision": 0.8449709415435791,
"macro_recall": 0.8543341755867004
},
"wikitoxic_threat": {
"macro_f1": 0.4374711513519287,
"accuracy": 0.5659999847412109,
"macro_precision": 0.5460250973701477,
"macro_recall": 0.7730125188827515
},
"wikitoxic_insult": {
"macro_f1": 0.6911536455154419,
"accuracy": 0.6940000057220459,
"macro_precision": 0.7779843807220459,
"macro_recall": 0.7445700168609619
}
}
} |
1.0 | {
"name": "deberta-v3-large-nli",
"model_type": "nli",
"params": "434M",
"revision": "unknown",
"url": "https://huggingface.co/deberta-v3-large-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.591105,
"accuracy": 0.618846,
"macro_precision": 0.674781,
"macro_recall": 0.64852
},
"by_task": {
"sentiment": {
"macro_f1": 0.897813,
"accuracy": 0.901768,
"macro_precision": 0.896349,
"macro_recall": 0.904284
},
"emotion": {
"macro_f1": 0.439626,
"accuracy": 0.4745,
"macro_precision": 0.513103,
"macro_recall": 0.465984
},
"intent": {
"macro_f1": 0.476085,
"accuracy": 0.49,
"macro_precision": 0.575433,
"macro_recall": 0.503048
},
"topic": {
"macro_f1": 0.48272,
"accuracy": 0.525909,
"macro_precision": 0.610417,
"macro_recall": 0.581875
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9227490425109863,
"accuracy": 0.9229999780654907,
"macro_precision": 0.9251483678817749,
"macro_recall": 0.9221413731575012
},
"imdb": {
"macro_f1": 0.9023271799087524,
"accuracy": 0.902999997138977,
"macro_precision": 0.9092003107070923,
"macro_recall": 0.9014514088630676
},
"appreviews": {
"macro_f1": 0.9279815554618835,
"accuracy": 0.9279999732971191,
"macro_precision": 0.9281506538391113,
"macro_recall": 0.9279314279556274
},
"yelpreviews": {
"macro_f1": 0.9829909801483154,
"accuracy": 0.9829999804496765,
"macro_precision": 0.9829331636428833,
"macro_recall": 0.9830665588378906
},
"rottentomatoes": {
"macro_f1": 0.8481759428977966,
"accuracy": 0.8489680886268616,
"macro_precision": 0.8564063906669617,
"macro_recall": 0.8489681482315063
},
"financialphrasebank": {
"macro_f1": 0.8026540279388428,
"accuracy": 0.8246376514434814,
"macro_precision": 0.7762570381164551,
"macro_recall": 0.8421474099159241
},
"emotiondair": {
"macro_f1": 0.440132737159729,
"accuracy": 0.4909999966621399,
"macro_precision": 0.4692642092704773,
"macro_recall": 0.4845179617404938
},
"empathetic": {
"macro_f1": 0.43911874294281006,
"accuracy": 0.4580000042915344,
"macro_precision": 0.5569407939910889,
"macro_recall": 0.4474509358406067
},
"banking77": {
"macro_f1": 0.34563589096069336,
"accuracy": 0.3610000014305115,
"macro_precision": 0.4481724798679352,
"macro_recall": 0.38250041007995605
},
"biasframes_intent": {
"macro_f1": 0.6648926734924316,
"accuracy": 0.6710000038146973,
"macro_precision": 0.7091915607452393,
"macro_recall": 0.6852177977561951
},
"massive": {
"macro_f1": 0.4177260398864746,
"accuracy": 0.43799999356269836,
"macro_precision": 0.5689337849617004,
"macro_recall": 0.44142600893974304
},
"agnews": {
"macro_f1": 0.814592182636261,
"accuracy": 0.8140000104904175,
"macro_precision": 0.8132357597351074,
"macro_recall": 0.8169254064559937
},
"yahootopics": {
"macro_f1": 0.5326335430145264,
"accuracy": 0.6019999980926514,
"macro_precision": 0.5630514621734619,
"macro_recall": 0.5421027541160583
},
"trueteacher": {
"macro_f1": 0.3410828411579132,
"accuracy": 0.49799999594688416,
"macro_precision": 0.6646546125411987,
"macro_recall": 0.503928542137146
},
"manifesto": {
"macro_f1": 0.05605190992355347,
"accuracy": 0.10899999737739563,
"macro_precision": 0.10709811747074127,
"macro_recall": 0.11071651428937912
},
"capsotu": {
"macro_f1": 0.2141467034816742,
"accuracy": 0.2280000001192093,
"macro_precision": 0.41052955389022827,
"macro_recall": 0.22876231372356415
},
"biasframes_offensive": {
"macro_f1": 0.5448735952377319,
"accuracy": 0.6349999904632568,
"macro_precision": 0.6968613862991333,
"macro_recall": 0.5858902335166931
},
"biasframes_sex": {
"macro_f1": 0.27395033836364746,
"accuracy": 0.2980000078678131,
"macro_precision": 0.5381578803062439,
"macro_recall": 0.6273885369300842
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7974669933319092,
"accuracy": 0.7990000247955322,
"macro_precision": 0.8141666650772095,
"macro_recall": 0.8018040060997009
},
"wikitoxic_obscene": {
"macro_f1": 0.8187779784202576,
"accuracy": 0.8190000057220459,
"macro_precision": 0.8296204805374146,
"macro_recall": 0.8357380032539368
},
"wikitoxic_threat": {
"macro_f1": 0.31557148694992065,
"accuracy": 0.3700000047683716,
"macro_precision": 0.5326409339904785,
"macro_recall": 0.6705020666122437
},
"wikitoxic_insult": {
"macro_f1": 0.6007736921310425,
"accuracy": 0.6129999756813049,
"macro_precision": 0.7445706129074097,
"macro_recall": 0.6768685579299927
}
}
} |
1.0 | {
"name": "modernbert-base-nli",
"model_type": "nli",
"params": "149M",
"revision": "unknown",
"url": "https://huggingface.co/modernbert-base-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.53426,
"accuracy": 0.563121,
"macro_precision": 0.607563,
"macro_recall": 0.579765
},
"by_task": {
"sentiment": {
"macro_f1": 0.835215,
"accuracy": 0.838109,
"macro_precision": 0.850589,
"macro_recall": 0.842525
},
"emotion": {
"macro_f1": 0.289969,
"accuracy": 0.289,
"macro_precision": 0.437244,
"macro_recall": 0.310462
},
"intent": {
"macro_f1": 0.271229,
"accuracy": 0.294667,
"macro_precision": 0.394436,
"macro_recall": 0.297341
},
"topic": {
"macro_f1": 0.486255,
"accuracy": 0.536182,
"macro_precision": 0.564095,
"macro_recall": 0.562431
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9107679128646851,
"accuracy": 0.9110000133514404,
"macro_precision": 0.9123939871788025,
"macro_recall": 0.9102893471717834
},
"imdb": {
"macro_f1": 0.890692949295044,
"accuracy": 0.890999972820282,
"macro_precision": 0.8924717903137207,
"macro_recall": 0.8902238011360168
},
"appreviews": {
"macro_f1": 0.907821536064148,
"accuracy": 0.9079999923706055,
"macro_precision": 0.9103059768676758,
"macro_recall": 0.907706081867218
},
"yelpreviews": {
"macro_f1": 0.9599224328994751,
"accuracy": 0.9599999785423279,
"macro_precision": 0.9609659910202026,
"macro_recall": 0.9595046043395996
},
"rottentomatoes": {
"macro_f1": 0.7408660054206848,
"accuracy": 0.7514071464538574,
"macro_precision": 0.8002640008926392,
"macro_recall": 0.7514071464538574
},
"financialphrasebank": {
"macro_f1": 0.6012195348739624,
"accuracy": 0.6072463989257812,
"macro_precision": 0.6271319389343262,
"macro_recall": 0.6360207200050354
},
"emotiondair": {
"macro_f1": 0.2848603427410126,
"accuracy": 0.28299999237060547,
"macro_precision": 0.4357683062553406,
"macro_recall": 0.3360637426376343
},
"empathetic": {
"macro_f1": 0.29507842659950256,
"accuracy": 0.29499998688697815,
"macro_precision": 0.43872055411338806,
"macro_recall": 0.284860223531723
},
"banking77": {
"macro_f1": 0.10325706750154495,
"accuracy": 0.08799999952316284,
"macro_precision": 0.19969211518764496,
"macro_recall": 0.09043437242507935
},
"biasframes_intent": {
"macro_f1": 0.4637435972690582,
"accuracy": 0.5189999938011169,
"macro_precision": 0.5955341458320618,
"macro_recall": 0.5465528964996338
},
"massive": {
"macro_f1": 0.2466864287853241,
"accuracy": 0.2770000100135803,
"macro_precision": 0.3880825638771057,
"macro_recall": 0.2550359070301056
},
"agnews": {
"macro_f1": 0.7388247847557068,
"accuracy": 0.7509999871253967,
"macro_precision": 0.7723070979118347,
"macro_recall": 0.7503945827484131
},
"yahootopics": {
"macro_f1": 0.4015524387359619,
"accuracy": 0.4440000057220459,
"macro_precision": 0.518562912940979,
"macro_recall": 0.4001066982746124
},
"trueteacher": {
"macro_f1": 0.4156147837638855,
"accuracy": 0.5,
"macro_precision": 0.5114648938179016,
"macro_recall": 0.5046326518058777
},
"manifesto": {
"macro_f1": 0.015092020854353905,
"accuracy": 0.02800000086426735,
"macro_precision": 0.07944910228252411,
"macro_recall": 0.0514046847820282
},
"capsotu": {
"macro_f1": 0.11951573193073273,
"accuracy": 0.18199999630451202,
"macro_precision": 0.2925623059272766,
"macro_recall": 0.19477753341197968
},
"biasframes_offensive": {
"macro_f1": 0.5483788847923279,
"accuracy": 0.6209999918937683,
"macro_precision": 0.637680172920227,
"macro_recall": 0.5776380896568298
},
"biasframes_sex": {
"macro_f1": 0.6421589255332947,
"accuracy": 0.8360000252723694,
"macro_precision": 0.6163214445114136,
"macro_recall": 0.8482319116592407
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7495993375778198,
"accuracy": 0.75,
"macro_precision": 0.7542345523834229,
"macro_recall": 0.7515480518341064
},
"wikitoxic_obscene": {
"macro_f1": 0.6825619339942932,
"accuracy": 0.7099999785423279,
"macro_precision": 0.7136391997337341,
"macro_recall": 0.6800752878189087
},
"wikitoxic_threat": {
"macro_f1": 0.2855295240879059,
"accuracy": 0.32600000500679016,
"macro_precision": 0.5306406617164612,
"macro_recall": 0.6474895477294922
},
"wikitoxic_insult": {
"macro_f1": 0.7499749660491943,
"accuracy": 0.75,
"macro_precision": 0.7781862616539001,
"macro_recall": 0.7804387211799622
}
}
} |
1.0 | {
"name": "modernbert-large-nli-triplet",
"model_type": "nli",
"params": "395M",
"revision": "unknown",
"url": "https://huggingface.co/modernbert-large-nli-triplet"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.548722,
"accuracy": 0.581244,
"macro_precision": 0.637688,
"macro_recall": 0.597926
},
"by_task": {
"sentiment": {
"macro_f1": 0.876335,
"accuracy": 0.890394,
"macro_precision": 0.892096,
"macro_recall": 0.870178
},
"emotion": {
"macro_f1": 0.342339,
"accuracy": 0.352,
"macro_precision": 0.472253,
"macro_recall": 0.373139
},
"intent": {
"macro_f1": 0.40947,
"accuracy": 0.421333,
"macro_precision": 0.503019,
"macro_recall": 0.429913
},
"topic": {
"macro_f1": 0.445527,
"accuracy": 0.497909,
"macro_precision": 0.565727,
"macro_recall": 0.536117
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9287685751914978,
"accuracy": 0.9290000200271606,
"macro_precision": 0.9311951398849487,
"macro_recall": 0.9281454086303711
},
"imdb": {
"macro_f1": 0.9105490446090698,
"accuracy": 0.9110000133514404,
"macro_precision": 0.915077269077301,
"macro_recall": 0.9097689986228943
},
"appreviews": {
"macro_f1": 0.9197109937667847,
"accuracy": 0.9200000166893005,
"macro_precision": 0.9248816967010498,
"macro_recall": 0.9195788502693176
},
"yelpreviews": {
"macro_f1": 0.9789847135543823,
"accuracy": 0.9789999723434448,
"macro_precision": 0.9790115356445312,
"macro_recall": 0.9789597988128662
},
"rottentomatoes": {
"macro_f1": 0.8706203699111938,
"accuracy": 0.8714821934700012,
"macro_precision": 0.8816505074501038,
"macro_recall": 0.8714821338653564
},
"financialphrasebank": {
"macro_f1": 0.6493753790855408,
"accuracy": 0.7318840622901917,
"macro_precision": 0.7207602262496948,
"macro_recall": 0.6131308674812317
},
"emotiondair": {
"macro_f1": 0.3704449534416199,
"accuracy": 0.36800000071525574,
"macro_precision": 0.49263378977775574,
"macro_recall": 0.4243590533733368
},
"empathetic": {
"macro_f1": 0.31423282623291016,
"accuracy": 0.335999995470047,
"macro_precision": 0.45187288522720337,
"macro_recall": 0.3219197690486908
},
"banking77": {
"macro_f1": 0.2112514078617096,
"accuracy": 0.21799999475479126,
"macro_precision": 0.3550959825515747,
"macro_recall": 0.22298629581928253
},
"biasframes_intent": {
"macro_f1": 0.5957847833633423,
"accuracy": 0.6019999980926514,
"macro_precision": 0.6277194619178772,
"macro_recall": 0.6149761080741882
},
"massive": {
"macro_f1": 0.421375036239624,
"accuracy": 0.4440000057220459,
"macro_precision": 0.5262409448623657,
"macro_recall": 0.45177799463272095
},
"agnews": {
"macro_f1": 0.706705629825592,
"accuracy": 0.7160000205039978,
"macro_precision": 0.7674962282180786,
"macro_recall": 0.7170899510383606
},
"yahootopics": {
"macro_f1": 0.24883794784545898,
"accuracy": 0.28600001335144043,
"macro_precision": 0.4079374372959137,
"macro_recall": 0.25621747970581055
},
"trueteacher": {
"macro_f1": 0.40925976634025574,
"accuracy": 0.5109999775886536,
"macro_precision": 0.5184117555618286,
"macro_recall": 0.5060928463935852
},
"manifesto": {
"macro_f1": 0.05937906354665756,
"accuracy": 0.07699999958276749,
"macro_precision": 0.20576725900173187,
"macro_recall": 0.08327395468950272
},
"capsotu": {
"macro_f1": 0.05233711004257202,
"accuracy": 0.12800000607967377,
"macro_precision": 0.17092861235141754,
"macro_recall": 0.0844113677740097
},
"biasframes_offensive": {
"macro_f1": 0.4505764842033386,
"accuracy": 0.5979999899864197,
"macro_precision": 0.7015386819839478,
"macro_recall": 0.5382528901100159
},
"biasframes_sex": {
"macro_f1": 0.344501793384552,
"accuracy": 0.3970000147819519,
"macro_precision": 0.5417810678482056,
"macro_recall": 0.6718463897705078
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7348260283470154,
"accuracy": 0.7440000176429749,
"macro_precision": 0.7960731983184814,
"macro_recall": 0.7493425607681274
},
"wikitoxic_obscene": {
"macro_f1": 0.8030894994735718,
"accuracy": 0.8040000200271606,
"macro_precision": 0.8065363168716431,
"macro_recall": 0.8147618770599365
},
"wikitoxic_threat": {
"macro_f1": 0.4223605990409851,
"accuracy": 0.5429999828338623,
"macro_precision": 0.5420881509780884,
"macro_recall": 0.7501426935195923
},
"wikitoxic_insult": {
"macro_f1": 0.6689207553863525,
"accuracy": 0.6729999780654907,
"macro_precision": 0.7644418478012085,
"macro_recall": 0.7258509993553162
}
}
} |
1.0 | {
"name": "modernbert-large-nli",
"model_type": "nli",
"params": "395M",
"revision": "unknown",
"url": "https://huggingface.co/modernbert-large-nli"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.551713,
"accuracy": 0.592935,
"macro_precision": 0.640183,
"macro_recall": 0.603994
},
"by_task": {
"sentiment": {
"macro_f1": 0.857985,
"accuracy": 0.884428,
"macro_precision": 0.899025,
"macro_recall": 0.852799
},
"emotion": {
"macro_f1": 0.300529,
"accuracy": 0.298,
"macro_precision": 0.469477,
"macro_recall": 0.327006
},
"intent": {
"macro_f1": 0.401046,
"accuracy": 0.42,
"macro_precision": 0.465542,
"macro_recall": 0.420738
},
"topic": {
"macro_f1": 0.471416,
"accuracy": 0.534727,
"macro_precision": 0.577663,
"macro_recall": 0.568622
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9326210021972656,
"accuracy": 0.9330000281333923,
"macro_precision": 0.9381246566772461,
"macro_recall": 0.9316798448562622
},
"imdb": {
"macro_f1": 0.9136237502098083,
"accuracy": 0.9139999747276306,
"macro_precision": 0.9173109531402588,
"macro_recall": 0.9129011034965515
},
"appreviews": {
"macro_f1": 0.9196913242340088,
"accuracy": 0.9200000166893005,
"macro_precision": 0.9252582788467407,
"macro_recall": 0.919562816619873
},
"yelpreviews": {
"macro_f1": 0.9789742231369019,
"accuracy": 0.9789999723434448,
"macro_precision": 0.979356050491333,
"macro_recall": 0.9787516593933105
},
"rottentomatoes": {
"macro_f1": 0.8638569712638855,
"accuracy": 0.8649155497550964,
"macro_precision": 0.8766298294067383,
"macro_recall": 0.8649156093597412
},
"financialphrasebank": {
"macro_f1": 0.5391452312469482,
"accuracy": 0.695652186870575,
"macro_precision": 0.7574691772460938,
"macro_recall": 0.5089853405952454
},
"emotiondair": {
"macro_f1": 0.2996448278427124,
"accuracy": 0.28299999237060547,
"macro_precision": 0.4735690951347351,
"macro_recall": 0.3506722152233124
},
"empathetic": {
"macro_f1": 0.30141282081604004,
"accuracy": 0.31299999356269836,
"macro_precision": 0.46538588404655457,
"macro_recall": 0.3033400774002075
},
"banking77": {
"macro_f1": 0.21216264367103577,
"accuracy": 0.23999999463558197,
"macro_precision": 0.28705841302871704,
"macro_recall": 0.24262915551662445
},
"biasframes_intent": {
"macro_f1": 0.6310776472091675,
"accuracy": 0.6320000290870667,
"macro_precision": 0.6431213617324829,
"macro_recall": 0.6394937038421631
},
"massive": {
"macro_f1": 0.3598971664905548,
"accuracy": 0.3880000114440918,
"macro_precision": 0.4664466083049774,
"macro_recall": 0.3800908029079437
},
"agnews": {
"macro_f1": 0.7555487155914307,
"accuracy": 0.7580000162124634,
"macro_precision": 0.7711875438690186,
"macro_recall": 0.7599613070487976
},
"yahootopics": {
"macro_f1": 0.4827136993408203,
"accuracy": 0.5440000295639038,
"macro_precision": 0.5348893404006958,
"macro_recall": 0.48952868580818176
},
"trueteacher": {
"macro_f1": 0.3737006187438965,
"accuracy": 0.5180000066757202,
"macro_precision": 0.6215447783470154,
"macro_recall": 0.5123137831687927
},
"manifesto": {
"macro_f1": 0.0924706980586052,
"accuracy": 0.09799999743700027,
"macro_precision": 0.14003317058086395,
"macro_recall": 0.1358354538679123
},
"capsotu": {
"macro_f1": 0.03901638463139534,
"accuracy": 0.08799999952316284,
"macro_precision": 0.17529267072677612,
"macro_recall": 0.07798612862825394
},
"biasframes_offensive": {
"macro_f1": 0.4208526611328125,
"accuracy": 0.5860000252723694,
"macro_precision": 0.6834946870803833,
"macro_recall": 0.5238502621650696
},
"biasframes_sex": {
"macro_f1": 0.4431055784225464,
"accuracy": 0.5479999780654907,
"macro_precision": 0.555086076259613,
"macro_recall": 0.7519950270652771
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7248871326446533,
"accuracy": 0.734000027179718,
"macro_precision": 0.7819554209709167,
"macro_recall": 0.7392317056655884
},
"wikitoxic_obscene": {
"macro_f1": 0.8074331283569336,
"accuracy": 0.8119999766349792,
"macro_precision": 0.8065485954284668,
"macro_recall": 0.8084891438484192
},
"wikitoxic_threat": {
"macro_f1": 0.4512676000595093,
"accuracy": 0.5889999866485596,
"macro_precision": 0.5483516454696655,
"macro_recall": 0.7850418090820312
},
"wikitoxic_insult": {
"macro_f1": 0.5945841073989868,
"accuracy": 0.6069999933242798,
"macro_precision": 0.7359136939048767,
"macro_recall": 0.6706080436706543
}
}
} |
1.0 | {
"name": "nli-roberta-base",
"model_type": "nli",
"params": "125M",
"revision": "unknown",
"url": "https://huggingface.co/nli-roberta-base"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.48849,
"accuracy": 0.504002,
"macro_precision": 0.579458,
"macro_recall": 0.549751
},
"by_task": {
"sentiment": {
"macro_f1": 0.800026,
"accuracy": 0.791339,
"macro_precision": 0.820028,
"macro_recall": 0.825161
},
"emotion": {
"macro_f1": 0.329737,
"accuracy": 0.3315,
"macro_precision": 0.452395,
"macro_recall": 0.386916
},
"intent": {
"macro_f1": 0.298851,
"accuracy": 0.315667,
"macro_precision": 0.378203,
"macro_recall": 0.310145
},
"topic": {
"macro_f1": 0.399145,
"accuracy": 0.43,
"macro_precision": 0.526228,
"macro_recall": 0.494481
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.886974573135376,
"accuracy": 0.8870000243186951,
"macro_precision": 0.8889477252960205,
"macro_recall": 0.8879902362823486
},
"imdb": {
"macro_f1": 0.8319670557975769,
"accuracy": 0.8320000171661377,
"macro_precision": 0.8319753408432007,
"macro_recall": 0.8321985602378845
},
"appreviews": {
"macro_f1": 0.8939982652664185,
"accuracy": 0.8939999938011169,
"macro_precision": 0.8941649198532104,
"macro_recall": 0.8940892219543457
},
"yelpreviews": {
"macro_f1": 0.8856909275054932,
"accuracy": 0.8859999775886536,
"macro_precision": 0.8943545818328857,
"macro_recall": 0.8879522681236267
},
"rottentomatoes": {
"macro_f1": 0.798117995262146,
"accuracy": 0.7983114719390869,
"macro_precision": 0.7994593381881714,
"macro_recall": 0.7983114719390869
},
"financialphrasebank": {
"macro_f1": 0.503407895565033,
"accuracy": 0.45072463154792786,
"macro_precision": 0.6112631559371948,
"macro_recall": 0.6504240036010742
},
"emotiondair": {
"macro_f1": 0.3415570557117462,
"accuracy": 0.33899998664855957,
"macro_precision": 0.4731428325176239,
"macro_recall": 0.4559451937675476
},
"empathetic": {
"macro_f1": 0.31791791319847107,
"accuracy": 0.3240000009536743,
"macro_precision": 0.4316464066505432,
"macro_recall": 0.3178861141204834
},
"banking77": {
"macro_f1": 0.06683805584907532,
"accuracy": 0.07500000298023224,
"macro_precision": 0.13073302805423737,
"macro_recall": 0.07116952538490295
},
"biasframes_intent": {
"macro_f1": 0.5059967637062073,
"accuracy": 0.5149999856948853,
"macro_precision": 0.531976044178009,
"macro_recall": 0.5283114910125732
},
"massive": {
"macro_f1": 0.32371756434440613,
"accuracy": 0.3569999933242798,
"macro_precision": 0.4719012379646301,
"macro_recall": 0.33095434308052063
},
"agnews": {
"macro_f1": 0.693808913230896,
"accuracy": 0.7049999833106995,
"macro_precision": 0.7239819169044495,
"macro_recall": 0.7066395878791809
},
"yahootopics": {
"macro_f1": 0.34989988803863525,
"accuracy": 0.4059999883174896,
"macro_precision": 0.4634127914905548,
"macro_recall": 0.35932984948158264
},
"trueteacher": {
"macro_f1": 0.45594465732574463,
"accuracy": 0.4830000102519989,
"macro_precision": 0.4758465588092804,
"macro_recall": 0.4803931713104248
},
"manifesto": {
"macro_f1": 0.017918219789862633,
"accuracy": 0.020999999716877937,
"macro_precision": 0.0878804549574852,
"macro_recall": 0.01680239662528038
},
"capsotu": {
"macro_f1": 0.13537120819091797,
"accuracy": 0.23000000417232513,
"macro_precision": 0.25667986273765564,
"macro_recall": 0.17624379694461823
},
"biasframes_offensive": {
"macro_f1": 0.46795955300331116,
"accuracy": 0.5830000042915344,
"macro_precision": 0.5809751749038696,
"macro_recall": 0.5302104949951172
},
"biasframes_sex": {
"macro_f1": 0.18175822496414185,
"accuracy": 0.1860000044107437,
"macro_precision": 0.5289124250411987,
"macro_recall": 0.5598506331443787
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6956897974014282,
"accuracy": 0.7020000219345093,
"macro_precision": 0.7286233305931091,
"macro_recall": 0.7062214016914368
},
"wikitoxic_obscene": {
"macro_f1": 0.7121915817260742,
"accuracy": 0.7129999995231628,
"macro_precision": 0.7184909582138062,
"macro_recall": 0.7237746715545654
},
"wikitoxic_threat": {
"macro_f1": 0.13583959639072418,
"accuracy": 0.1379999965429306,
"macro_precision": 0.5242825746536255,
"macro_recall": 0.5491631627082825
},
"wikitoxic_insult": {
"macro_f1": 0.5442176461219788,
"accuracy": 0.5630000233650208,
"macro_precision": 0.6994266510009766,
"macro_recall": 0.630657434463501
}
}
} |
1.0 | {
"name": "Qwen3-Reranker-0.6B",
"model_type": "reranker",
"params": "600M",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-Reranker-0.6B"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.605598,
"accuracy": 0.642526,
"macro_precision": 0.65409,
"macro_recall": 0.669456
},
"by_task": {
"sentiment": {
"macro_f1": 0.80429,
"accuracy": 0.798427,
"macro_precision": 0.841131,
"macro_recall": 0.838923
},
"emotion": {
"macro_f1": 0.447133,
"accuracy": 0.4825,
"macro_precision": 0.48464,
"macro_recall": 0.472349
},
"intent": {
"macro_f1": 0.551512,
"accuracy": 0.571667,
"macro_precision": 0.597638,
"macro_recall": 0.605195
},
"topic": {
"macro_f1": 0.540783,
"accuracy": 0.605909,
"macro_precision": 0.598272,
"macro_recall": 0.630382
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9119647741317749,
"accuracy": 0.9120000004768372,
"macro_precision": 0.9118987321853638,
"macro_recall": 0.9120965600013733
},
"imdb": {
"macro_f1": 0.8839995265007019,
"accuracy": 0.8840000033378601,
"macro_precision": 0.8847362399101257,
"macro_recall": 0.8846499919891357
},
"appreviews": {
"macro_f1": 0.8867498636245728,
"accuracy": 0.8870000243186951,
"macro_precision": 0.8895370364189148,
"macro_recall": 0.8866807222366333
},
"yelpreviews": {
"macro_f1": 0.9459965229034424,
"accuracy": 0.9459999799728394,
"macro_precision": 0.9473351240158081,
"macro_recall": 0.9468480944633484
},
"rottentomatoes": {
"macro_f1": 0.7832179069519043,
"accuracy": 0.7833020687103271,
"macro_precision": 0.7837425470352173,
"macro_recall": 0.7833020687103271
},
"financialphrasebank": {
"macro_f1": 0.4138122797012329,
"accuracy": 0.37826088070869446,
"macro_precision": 0.6295381188392639,
"macro_recall": 0.619957685470581
},
"emotiondair": {
"macro_f1": 0.4866149127483368,
"accuracy": 0.5460000038146973,
"macro_precision": 0.49488916993141174,
"macro_recall": 0.5231236219406128
},
"empathetic": {
"macro_f1": 0.4076511263847351,
"accuracy": 0.4189999997615814,
"macro_precision": 0.47439178824424744,
"macro_recall": 0.42157384753227234
},
"banking77": {
"macro_f1": 0.6274953484535217,
"accuracy": 0.625,
"macro_precision": 0.6613081097602844,
"macro_recall": 0.6616645455360413
},
"biasframes_intent": {
"macro_f1": 0.49556970596313477,
"accuracy": 0.5649999976158142,
"macro_precision": 0.5713315010070801,
"macro_recall": 0.5399225950241089
},
"massive": {
"macro_f1": 0.5314709544181824,
"accuracy": 0.5249999761581421,
"macro_precision": 0.5602739453315735,
"macro_recall": 0.613998532295227
},
"agnews": {
"macro_f1": 0.7880467772483826,
"accuracy": 0.7960000038146973,
"macro_precision": 0.8210300207138062,
"macro_recall": 0.794529914855957
},
"yahootopics": {
"macro_f1": 0.5461462736129761,
"accuracy": 0.6079999804496765,
"macro_precision": 0.5763124227523804,
"macro_recall": 0.5440412759780884
},
"trueteacher": {
"macro_f1": 0.33554816246032715,
"accuracy": 0.5049999952316284,
"macro_precision": 0.25275275111198425,
"macro_recall": 0.4990118443965912
},
"manifesto": {
"macro_f1": 0.2676747143268585,
"accuracy": 0.4009999930858612,
"macro_precision": 0.3079465925693512,
"macro_recall": 0.314266175031662
},
"capsotu": {
"macro_f1": 0.528754472732544,
"accuracy": 0.5899999737739563,
"macro_precision": 0.5465952157974243,
"macro_recall": 0.5923603177070618
},
"biasframes_offensive": {
"macro_f1": 0.5712165832519531,
"accuracy": 0.6439999938011169,
"macro_precision": 0.6866195797920227,
"macro_recall": 0.599557638168335
},
"biasframes_sex": {
"macro_f1": 0.07787039130926132,
"accuracy": 0.07900000363588333,
"macro_precision": 0.5074318051338196,
"macro_recall": 0.5030565857887268
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.7912979125976562,
"accuracy": 0.7919999957084656,
"macro_precision": 0.7932049632072449,
"macro_recall": 0.7910267114639282
},
"wikitoxic_obscene": {
"macro_f1": 0.7990478277206421,
"accuracy": 0.8059999942779541,
"macro_precision": 0.802010178565979,
"macro_recall": 0.796930193901062
},
"wikitoxic_threat": {
"macro_f1": 0.5021486282348633,
"accuracy": 0.6959999799728394,
"macro_precision": 0.5482288599014282,
"macro_recall": 0.7542791962623596
},
"wikitoxic_insult": {
"macro_f1": 0.7408590912818909,
"accuracy": 0.7480000257492065,
"macro_precision": 0.7388573884963989,
"macro_recall": 0.7451459765434265
}
}
} |
1.0 | {
"name": "Qwen3-Reranker-8B",
"model_type": "reranker",
"params": "8B",
"revision": "unknown",
"url": "https://huggingface.co/Qwen3-Reranker-8B"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.72244,
"accuracy": 0.764832,
"macro_precision": 0.763814,
"macro_recall": 0.763002
},
"by_task": {
"sentiment": {
"macro_f1": 0.92305,
"accuracy": 0.927717,
"macro_precision": 0.929232,
"macro_recall": 0.919381
},
"emotion": {
"macro_f1": 0.485845,
"accuracy": 0.525,
"macro_precision": 0.539821,
"macro_recall": 0.498886
},
"intent": {
"macro_f1": 0.700498,
"accuracy": 0.715,
"macro_precision": 0.745874,
"macro_recall": 0.732819
},
"topic": {
"macro_f1": 0.662018,
"accuracy": 0.733182,
"macro_precision": 0.719205,
"macro_recall": 0.733956
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9569586515426636,
"accuracy": 0.9570000171661377,
"macro_precision": 0.9571244120597839,
"macro_recall": 0.9568408727645874
},
"imdb": {
"macro_f1": 0.9499607682228088,
"accuracy": 0.949999988079071,
"macro_precision": 0.9500150084495544,
"macro_recall": 0.9499140977859497
},
"appreviews": {
"macro_f1": 0.9287347793579102,
"accuracy": 0.9290000200271606,
"macro_precision": 0.9341866970062256,
"macro_recall": 0.9285714626312256
},
"yelpreviews": {
"macro_f1": 0.9819982051849365,
"accuracy": 0.9819999933242798,
"macro_precision": 0.9820953607559204,
"macro_recall": 0.9824041128158569
},
"rottentomatoes": {
"macro_f1": 0.903188943862915,
"accuracy": 0.903377115726471,
"macro_precision": 0.9065382480621338,
"macro_recall": 0.903377115726471
},
"financialphrasebank": {
"macro_f1": 0.8174570798873901,
"accuracy": 0.8449275493621826,
"macro_precision": 0.8454323410987854,
"macro_recall": 0.7951804995536804
},
"emotiondair": {
"macro_f1": 0.48749345541000366,
"accuracy": 0.5609999895095825,
"macro_precision": 0.49956265091896057,
"macro_recall": 0.5077645778656006
},
"empathetic": {
"macro_f1": 0.4841960072517395,
"accuracy": 0.48899999260902405,
"macro_precision": 0.5800796151161194,
"macro_recall": 0.49000710248947144
},
"banking77": {
"macro_f1": 0.6905889511108398,
"accuracy": 0.6740000247955322,
"macro_precision": 0.7354130744934082,
"macro_recall": 0.7193227410316467
},
"biasframes_intent": {
"macro_f1": 0.737960934638977,
"accuracy": 0.7549999952316284,
"macro_precision": 0.7938579320907593,
"macro_recall": 0.7398936152458191
},
"massive": {
"macro_f1": 0.6729446649551392,
"accuracy": 0.7160000205039978,
"macro_precision": 0.7083504796028137,
"macro_recall": 0.7392416000366211
},
"agnews": {
"macro_f1": 0.7875484228134155,
"accuracy": 0.8019999861717224,
"macro_precision": 0.8350103497505188,
"macro_recall": 0.7995575666427612
},
"yahootopics": {
"macro_f1": 0.610032320022583,
"accuracy": 0.6710000038146973,
"macro_precision": 0.6458627581596375,
"macro_recall": 0.6013916730880737
},
"trueteacher": {
"macro_f1": 0.3602437376976013,
"accuracy": 0.5170000195503235,
"macro_precision": 0.7558139562606812,
"macro_recall": 0.51113361120224
},
"manifesto": {
"macro_f1": 0.3251327574253082,
"accuracy": 0.4560000002384186,
"macro_precision": 0.398116797208786,
"macro_recall": 0.34560656547546387
},
"capsotu": {
"macro_f1": 0.6593928337097168,
"accuracy": 0.6809999942779541,
"macro_precision": 0.7135266661643982,
"macro_recall": 0.694545328617096
},
"biasframes_offensive": {
"macro_f1": 0.7650375962257385,
"accuracy": 0.7760000228881836,
"macro_precision": 0.7809867858886719,
"macro_recall": 0.7607153654098511
},
"biasframes_sex": {
"macro_f1": 0.6409658193588257,
"accuracy": 0.8240000009536743,
"macro_precision": 0.6186710596084595,
"macro_recall": 0.8823120594024658
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.856735110282898,
"accuracy": 0.8569999933242798,
"macro_precision": 0.8630095720291138,
"macro_recall": 0.8586984872817993
},
"wikitoxic_obscene": {
"macro_f1": 0.8774905204772949,
"accuracy": 0.8790000081062317,
"macro_precision": 0.8755072355270386,
"macro_recall": 0.8845878839492798
},
"wikitoxic_threat": {
"macro_f1": 0.5778453350067139,
"accuracy": 0.7799999713897705,
"macro_precision": 0.5833333134651184,
"macro_recall": 0.8849372267723083
},
"wikitoxic_insult": {
"macro_f1": 0.8217689990997314,
"accuracy": 0.8220000267028809,
"macro_precision": 0.8414127826690674,
"macro_recall": 0.8500308990478516
}
}
} |
1.0 | {
"name": "bge-reranker-base",
"model_type": "reranker",
"params": "278M",
"revision": "unknown",
"url": "https://huggingface.co/bge-reranker-base"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.471124,
"accuracy": 0.491962,
"macro_precision": 0.52378,
"macro_recall": 0.529067
},
"by_task": {
"sentiment": {
"macro_f1": 0.620593,
"accuracy": 0.621526,
"macro_precision": 0.637115,
"macro_recall": 0.646325
},
"emotion": {
"macro_f1": 0.292529,
"accuracy": 0.3155,
"macro_precision": 0.356538,
"macro_recall": 0.302878
},
"intent": {
"macro_f1": 0.468799,
"accuracy": 0.478,
"macro_precision": 0.504352,
"macro_recall": 0.513411
},
"topic": {
"macro_f1": 0.422702,
"accuracy": 0.457182,
"macro_precision": 0.497668,
"macro_recall": 0.510503
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.6631579399108887,
"accuracy": 0.6639999747276306,
"macro_precision": 0.6639357209205627,
"macro_recall": 0.6631482839584351
},
"imdb": {
"macro_f1": 0.6949630975723267,
"accuracy": 0.6949999928474426,
"macro_precision": 0.6950551271438599,
"macro_recall": 0.6951839327812195
},
"appreviews": {
"macro_f1": 0.7903658151626587,
"accuracy": 0.7910000085830688,
"macro_precision": 0.7936471104621887,
"macro_recall": 0.7906106114387512
},
"yelpreviews": {
"macro_f1": 0.6496093273162842,
"accuracy": 0.6510000228881836,
"macro_precision": 0.6512120962142944,
"macro_recall": 0.6498012542724609
},
"rottentomatoes": {
"macro_f1": 0.5743533372879028,
"accuracy": 0.5759850144386292,
"macro_precision": 0.5771682262420654,
"macro_recall": 0.5759849548339844
},
"financialphrasebank": {
"macro_f1": 0.35110652446746826,
"accuracy": 0.35217392444610596,
"macro_precision": 0.44167008996009827,
"macro_recall": 0.5032211542129517
},
"emotiondair": {
"macro_f1": 0.2955600917339325,
"accuracy": 0.35499998927116394,
"macro_precision": 0.3168407380580902,
"macro_recall": 0.33640772104263306
},
"empathetic": {
"macro_f1": 0.2894975244998932,
"accuracy": 0.2759999930858612,
"macro_precision": 0.3962348699569702,
"macro_recall": 0.26934754848480225
},
"banking77": {
"macro_f1": 0.47857925295829773,
"accuracy": 0.4860000014305115,
"macro_precision": 0.5719695091247559,
"macro_recall": 0.5103347301483154
},
"biasframes_intent": {
"macro_f1": 0.4995477795600891,
"accuracy": 0.5019999742507935,
"macro_precision": 0.4995661973953247,
"macro_recall": 0.4995654821395874
},
"massive": {
"macro_f1": 0.42827051877975464,
"accuracy": 0.44600000977516174,
"macro_precision": 0.44151943922042847,
"macro_recall": 0.5303319692611694
},
"agnews": {
"macro_f1": 0.632666826248169,
"accuracy": 0.6549999713897705,
"macro_precision": 0.6737851500511169,
"macro_recall": 0.6561883687973022
},
"yahootopics": {
"macro_f1": 0.40837764739990234,
"accuracy": 0.47200000286102295,
"macro_precision": 0.4528435468673706,
"macro_recall": 0.4204748272895813
},
"trueteacher": {
"macro_f1": 0.4728476405143738,
"accuracy": 0.4729999899864197,
"macro_precision": 0.4728548526763916,
"macro_recall": 0.4728640913963318
},
"manifesto": {
"macro_f1": 0.1635625809431076,
"accuracy": 0.2720000147819519,
"macro_precision": 0.18218491971492767,
"macro_recall": 0.19312702119350433
},
"capsotu": {
"macro_f1": 0.39848554134368896,
"accuracy": 0.43799999356269836,
"macro_precision": 0.40748095512390137,
"macro_recall": 0.521611750125885
},
"biasframes_offensive": {
"macro_f1": 0.4740513563156128,
"accuracy": 0.4790000021457672,
"macro_precision": 0.4748896062374115,
"macro_recall": 0.47452253103256226
},
"biasframes_sex": {
"macro_f1": 0.3038731813430786,
"accuracy": 0.34599998593330383,
"macro_precision": 0.5227616429328918,
"macro_recall": 0.588146984577179
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5650734901428223,
"accuracy": 0.5730000138282776,
"macro_precision": 0.5842276811599731,
"macro_recall": 0.5769000053405762
},
"wikitoxic_obscene": {
"macro_f1": 0.54896080493927,
"accuracy": 0.550000011920929,
"macro_precision": 0.5764907598495483,
"macro_recall": 0.5732912421226501
},
"wikitoxic_threat": {
"macro_f1": 0.2721618413925171,
"accuracy": 0.3109999895095825,
"macro_precision": 0.5203135013580322,
"macro_recall": 0.596281886100769
},
"wikitoxic_insult": {
"macro_f1": 0.4096657335758209,
"accuracy": 0.46000000834465027,
"macro_precision": 0.6065172553062439,
"macro_recall": 0.5421292781829834
}
}
} |
1.0 | {
"name": "bge-reranker-large",
"model_type": "reranker",
"params": "560M",
"revision": "unknown",
"url": "https://huggingface.co/bge-reranker-large"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.534818,
"accuracy": 0.556759,
"macro_precision": 0.610169,
"macro_recall": 0.602469
},
"by_task": {
"sentiment": {
"macro_f1": 0.781011,
"accuracy": 0.780451,
"macro_precision": 0.788273,
"macro_recall": 0.79803
},
"emotion": {
"macro_f1": 0.367727,
"accuracy": 0.3775,
"macro_precision": 0.464957,
"macro_recall": 0.402225
},
"intent": {
"macro_f1": 0.536768,
"accuracy": 0.543,
"macro_precision": 0.56468,
"macro_recall": 0.580803
},
"topic": {
"macro_f1": 0.430379,
"accuracy": 0.471091,
"macro_precision": 0.55183,
"macro_recall": 0.538116
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.8699994683265686,
"accuracy": 0.8700000047683716,
"macro_precision": 0.8704652786254883,
"macro_recall": 0.8705364465713501
},
"imdb": {
"macro_f1": 0.8030894994735718,
"accuracy": 0.8040000200271606,
"macro_precision": 0.8143559694290161,
"macro_recall": 0.8063130378723145
},
"appreviews": {
"macro_f1": 0.8909868001937866,
"accuracy": 0.890999972820282,
"macro_precision": 0.891020655632019,
"macro_recall": 0.8909690380096436
},
"yelpreviews": {
"macro_f1": 0.8786844611167908,
"accuracy": 0.8790000081062317,
"macro_precision": 0.8870046734809875,
"macro_recall": 0.880921483039856
},
"rottentomatoes": {
"macro_f1": 0.7559291124343872,
"accuracy": 0.7560975551605225,
"macro_precision": 0.7568062543869019,
"macro_recall": 0.7560975551605225
},
"financialphrasebank": {
"macro_f1": 0.48737820982933044,
"accuracy": 0.48260870575904846,
"macro_precision": 0.509984016418457,
"macro_recall": 0.5833433270454407
},
"emotiondair": {
"macro_f1": 0.3911106586456299,
"accuracy": 0.41499999165534973,
"macro_precision": 0.43648621439933777,
"macro_recall": 0.47783219814300537
},
"empathetic": {
"macro_f1": 0.34434404969215393,
"accuracy": 0.3400000035762787,
"macro_precision": 0.4934283196926117,
"macro_recall": 0.3266182839870453
},
"banking77": {
"macro_f1": 0.5650694370269775,
"accuracy": 0.5569999814033508,
"macro_precision": 0.6598912477493286,
"macro_recall": 0.584199845790863
},
"biasframes_intent": {
"macro_f1": 0.5915988683700562,
"accuracy": 0.597000002861023,
"macro_precision": 0.5933622121810913,
"macro_recall": 0.5916775465011597
},
"massive": {
"macro_f1": 0.4536345899105072,
"accuracy": 0.4749999940395355,
"macro_precision": 0.4407878816127777,
"macro_recall": 0.5665310025215149
},
"agnews": {
"macro_f1": 0.726889431476593,
"accuracy": 0.7419999837875366,
"macro_precision": 0.770831823348999,
"macro_recall": 0.7403790950775146
},
"yahootopics": {
"macro_f1": 0.5268266797065735,
"accuracy": 0.593999981880188,
"macro_precision": 0.5396367311477661,
"macro_recall": 0.5347358584403992
},
"trueteacher": {
"macro_f1": 0.4441928267478943,
"accuracy": 0.4490000009536743,
"macro_precision": 0.44633111357688904,
"macro_recall": 0.44794851541519165
},
"manifesto": {
"macro_f1": 0.16162124276161194,
"accuracy": 0.3199999928474426,
"macro_precision": 0.2439526617527008,
"macro_recall": 0.17478394508361816
},
"capsotu": {
"macro_f1": 0.47877487540245056,
"accuracy": 0.5170000195503235,
"macro_precision": 0.507485032081604,
"macro_recall": 0.5725051164627075
},
"biasframes_offensive": {
"macro_f1": 0.529961347579956,
"accuracy": 0.5789999961853027,
"macro_precision": 0.5605754852294922,
"macro_recall": 0.5455132126808167
},
"biasframes_sex": {
"macro_f1": 0.20037832856178284,
"accuracy": 0.2070000022649765,
"macro_precision": 0.534077525138855,
"macro_recall": 0.5790870189666748
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.6028369069099426,
"accuracy": 0.6150000095367432,
"macro_precision": 0.6396280527114868,
"macro_recall": 0.6199691295623779
},
"wikitoxic_obscene": {
"macro_f1": 0.5302799940109253,
"accuracy": 0.5559999942779541,
"macro_precision": 0.6795343160629272,
"macro_recall": 0.6108042001724243
},
"wikitoxic_threat": {
"macro_f1": 0.1538538932800293,
"accuracy": 0.15800000727176666,
"macro_precision": 0.52483069896698,
"macro_recall": 0.5596234202384949
},
"wikitoxic_insult": {
"macro_f1": 0.37854886054992676,
"accuracy": 0.4449999928474426,
"macro_precision": 0.6232432126998901,
"macro_recall": 0.5339279174804688
}
}
} |
1.0 | {
"name": "gte-reranker-modernbert-base",
"model_type": "reranker",
"params": "149M",
"revision": "unknown",
"url": "https://huggingface.co/gte-reranker-modernbert-base"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.57849,
"accuracy": 0.616544,
"macro_precision": 0.625188,
"macro_recall": 0.61616
},
"by_task": {
"sentiment": {
"macro_f1": 0.82026,
"accuracy": 0.818163,
"macro_precision": 0.853517,
"macro_recall": 0.846494
},
"emotion": {
"macro_f1": 0.422199,
"accuracy": 0.4575,
"macro_precision": 0.472394,
"macro_recall": 0.434904
},
"intent": {
"macro_f1": 0.514074,
"accuracy": 0.557,
"macro_precision": 0.595518,
"macro_recall": 0.583573
},
"topic": {
"macro_f1": 0.492599,
"accuracy": 0.551727,
"macro_precision": 0.536518,
"macro_recall": 0.532366
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.9113131761550903,
"accuracy": 0.9120000004768372,
"macro_precision": 0.9194884896278381,
"macro_recall": 0.9103273749351501
},
"imdb": {
"macro_f1": 0.8373899459838867,
"accuracy": 0.8410000205039978,
"macro_precision": 0.8644256591796875,
"macro_recall": 0.8376922607421875
},
"appreviews": {
"macro_f1": 0.916504979133606,
"accuracy": 0.9169999957084656,
"macro_precision": 0.9254856705665588,
"macro_recall": 0.9164426326751709
},
"yelpreviews": {
"macro_f1": 0.9629937410354614,
"accuracy": 0.9629999995231628,
"macro_precision": 0.9629999995231628,
"macro_recall": 0.9633132219314575
},
"rottentomatoes": {
"macro_f1": 0.7976751327514648,
"accuracy": 0.8020637631416321,
"macro_precision": 0.8307623863220215,
"macro_recall": 0.8020638227462769
},
"financialphrasebank": {
"macro_f1": 0.4956844449043274,
"accuracy": 0.4739130437374115,
"macro_precision": 0.6179410815238953,
"macro_recall": 0.6491255760192871
},
"emotiondair": {
"macro_f1": 0.45084241032600403,
"accuracy": 0.5210000276565552,
"macro_precision": 0.4515022337436676,
"macro_recall": 0.48167750239372253
},
"empathetic": {
"macro_f1": 0.3935551643371582,
"accuracy": 0.39399999380111694,
"macro_precision": 0.4932851195335388,
"macro_recall": 0.38813093304634094
},
"banking77": {
"macro_f1": 0.6466980576515198,
"accuracy": 0.6349999904632568,
"macro_precision": 0.676284670829773,
"macro_recall": 0.6737946271896362
},
"biasframes_intent": {
"macro_f1": 0.35648372769355774,
"accuracy": 0.4699999988079071,
"macro_precision": 0.5298737287521362,
"macro_recall": 0.5048359632492065
},
"massive": {
"macro_f1": 0.5390387177467346,
"accuracy": 0.5659999847412109,
"macro_precision": 0.5803945064544678,
"macro_recall": 0.5720893144607544
},
"agnews": {
"macro_f1": 0.6849942207336426,
"accuracy": 0.7120000123977661,
"macro_precision": 0.7636871337890625,
"macro_recall": 0.7090103626251221
},
"yahootopics": {
"macro_f1": 0.3708208203315735,
"accuracy": 0.3880000114440918,
"macro_precision": 0.5611310005187988,
"macro_recall": 0.3892977833747864
},
"trueteacher": {
"macro_f1": 0.5798214077949524,
"accuracy": 0.5979999899864197,
"macro_precision": 0.6142182350158691,
"macro_recall": 0.5955897569656372
},
"manifesto": {
"macro_f1": 0.17288586497306824,
"accuracy": 0.25600001215934753,
"macro_precision": 0.23649853467941284,
"macro_recall": 0.2129371166229248
},
"capsotu": {
"macro_f1": 0.4171936810016632,
"accuracy": 0.47999998927116394,
"macro_precision": 0.45853084325790405,
"macro_recall": 0.4752907454967499
},
"biasframes_offensive": {
"macro_f1": 0.5220422744750977,
"accuracy": 0.5239999890327454,
"macro_precision": 0.5244028568267822,
"macro_recall": 0.5248481631278992
},
"biasframes_sex": {
"macro_f1": 0.5966007709503174,
"accuracy": 0.8489999771118164,
"macro_precision": 0.5789476037025452,
"macro_recall": 0.6852442026138306
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.5468690395355225,
"accuracy": 0.546999990940094,
"macro_precision": 0.5479526519775391,
"macro_recall": 0.5478123426437378
},
"wikitoxic_obscene": {
"macro_f1": 0.5600804090499878,
"accuracy": 0.5659999847412109,
"macro_precision": 0.5607047080993652,
"macro_recall": 0.5620940923690796
},
"wikitoxic_threat": {
"macro_f1": 0.4324122369289398,
"accuracy": 0.6010000109672546,
"macro_precision": 0.5204809308052063,
"macro_recall": 0.6178680062294006
},
"wikitoxic_insult": {
"macro_f1": 0.5348722338676453,
"accuracy": 0.5479999780654907,
"macro_precision": 0.5351483821868896,
"macro_recall": 0.5360356569290161
}
}
} |
1.0 | {
"name": "ms-marco-MiniLM-L6-v2",
"model_type": "reranker",
"params": "22M",
"revision": "unknown",
"url": "https://huggingface.co/ms-marco-MiniLM-L6-v2"
} | {
"btzsc_version": "0.1.1",
"btzsc_commit": "8a0d52cbe423",
"timestamp": "2026-03-01T19:47:17.282165+00:00",
"device": "unknown",
"precision": "unknown",
"batch_size": 32,
"max_samples": null
} | {
"overall": {
"macro_f1": 0.421785,
"accuracy": 0.460398,
"macro_precision": 0.486364,
"macro_recall": 0.456056
},
"by_task": {
"sentiment": {
"macro_f1": 0.58518,
"accuracy": 0.590793,
"macro_precision": 0.625615,
"macro_recall": 0.62621
},
"emotion": {
"macro_f1": 0.191836,
"accuracy": 0.1925,
"macro_precision": 0.337519,
"macro_recall": 0.226099
},
"intent": {
"macro_f1": 0.30124,
"accuracy": 0.292,
"macro_precision": 0.399039,
"macro_recall": 0.298178
},
"topic": {
"macro_f1": 0.407345,
"accuracy": 0.483909,
"macro_precision": 0.461287,
"macro_recall": 0.448113
}
},
"by_dataset": {
"amazonpolarity": {
"macro_f1": 0.6817625761032104,
"accuracy": 0.6880000233650208,
"macro_precision": 0.7118111848831177,
"macro_recall": 0.6921078562736511
},
"imdb": {
"macro_f1": 0.6169463396072388,
"accuracy": 0.625,
"macro_precision": 0.6434303522109985,
"macro_recall": 0.6291953325271606
},
"appreviews": {
"macro_f1": 0.7086011171340942,
"accuracy": 0.7120000123977661,
"macro_precision": 0.7207171320915222,
"macro_recall": 0.7111815214157104
},
"yelpreviews": {
"macro_f1": 0.6500681638717651,
"accuracy": 0.6539999842643738,
"macro_precision": 0.666526734828949,
"macro_recall": 0.6572002172470093
},
"rottentomatoes": {
"macro_f1": 0.5772273540496826,
"accuracy": 0.5787992477416992,
"macro_precision": 0.5799888372421265,
"macro_recall": 0.5787992477416992
},
"financialphrasebank": {
"macro_f1": 0.27647721767425537,
"accuracy": 0.2869565188884735,
"macro_precision": 0.4312140643596649,
"macro_recall": 0.4887731969356537
},
"emotiondair": {
"macro_f1": 0.20086175203323364,
"accuracy": 0.20399999618530273,
"macro_precision": 0.325905442237854,
"macro_recall": 0.2706259787082672
},
"empathetic": {
"macro_f1": 0.1828111708164215,
"accuracy": 0.1809999942779541,
"macro_precision": 0.34913337230682373,
"macro_recall": 0.18157251179218292
},
"banking77": {
"macro_f1": 0.23708733916282654,
"accuracy": 0.2199999988079071,
"macro_precision": 0.4417795240879059,
"macro_recall": 0.21929438412189484
},
"biasframes_intent": {
"macro_f1": 0.6136585474014282,
"accuracy": 0.6169999837875366,
"macro_precision": 0.6142018437385559,
"macro_recall": 0.6134754419326782
},
"massive": {
"macro_f1": 0.05297502502799034,
"accuracy": 0.039000000804662704,
"macro_precision": 0.14113575220108032,
"macro_recall": 0.06176289916038513
},
"agnews": {
"macro_f1": 0.39791375398635864,
"accuracy": 0.4180000126361847,
"macro_precision": 0.5021059513092041,
"macro_recall": 0.4154464900493622
},
"yahootopics": {
"macro_f1": 0.298456072807312,
"accuracy": 0.3269999921321869,
"macro_precision": 0.3344421088695526,
"macro_recall": 0.2964455485343933
},
"trueteacher": {
"macro_f1": 0.3914633095264435,
"accuracy": 0.5180000066757202,
"macro_precision": 0.566315770149231,
"macro_recall": 0.5126017928123474
},
"manifesto": {
"macro_f1": 0.07393017411231995,
"accuracy": 0.14000000059604645,
"macro_precision": 0.1313643604516983,
"macro_recall": 0.09652630239725113
},
"capsotu": {
"macro_f1": 0.29766881465911865,
"accuracy": 0.3230000138282776,
"macro_precision": 0.36105501651763916,
"macro_recall": 0.3598368167877197
},
"biasframes_offensive": {
"macro_f1": 0.6154738664627075,
"accuracy": 0.6169999837875366,
"macro_precision": 0.6441656351089478,
"macro_recall": 0.6368777751922607
},
"biasframes_sex": {
"macro_f1": 0.46941402554512024,
"accuracy": 0.7459999918937683,
"macro_precision": 0.4977855384349823,
"macro_recall": 0.4930448830127716
},
"wikitoxic_toxicaggregated": {
"macro_f1": 0.4693637490272522,
"accuracy": 0.5019999742507935,
"macro_precision": 0.49472469091415405,
"macro_recall": 0.49588721990585327
},
"wikitoxic_obscene": {
"macro_f1": 0.5130991339683533,
"accuracy": 0.5509999990463257,
"macro_precision": 0.5221269130706787,
"macro_recall": 0.5192086100578308
},
"wikitoxic_threat": {
"macro_f1": 0.45319265127182007,
"accuracy": 0.6539999842643738,
"macro_precision": 0.5189011096954346,
"macro_recall": 0.6022251844406128
},
"wikitoxic_insult": {
"macro_f1": 0.5008226633071899,
"accuracy": 0.5270000100135803,
"macro_precision": 0.5011672973632812,
"macro_recall": 0.5011394023895264
}
}
} |
BTZSC Results
This repository stores model submissions for the BTZSC leaderboard.
BTZSC: A Benchmark for Zero-Shot Text Classification across Cross-Encoders, Embedding Models, Rerankers and LLMs.
- Paper: https://openreview.net/forum?id=IxMryAz2p3
- Eval harness: https://github.com/IliasAarab/btzsc
- Leaderboard Space: https://huggingface.co/spaces/btzsc/btzsc-leaderboard
Benchmark summary:
- 22 English single-label datasets
- 4 task families: sentiment, topic, intent, emotion
- Strict zero-shot protocol (no BTZSC-label training/tuning)
- Primary metric: macro-F1
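The primary metric, macro-F1, is the unweighted mean of per-class F1 scores, so rare classes weigh as much as common ones. A minimal illustration with scikit-learn (the labels here are invented for the example):

```python
from sklearn.metrics import f1_score

y_true = ["pos", "neg", "neg", "pos", "neu"]
y_pred = ["pos", "neg", "pos", "pos", "neu"]

# average="macro": compute F1 per class, then take the plain (unweighted) mean
print(f1_score(y_true, y_pred, average="macro"))
```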
What this repo contains
- One JSON file per model evaluation run in `results/<model_type>/<model-name>.json`
- Reproducibility metadata (BTZSC version, commit, precision, batch size)
- Full per-dataset metrics for all 22 BTZSC datasets
Schema
Each submission follows schema version 1.0 with:
- `model`: model id, type, parameter count, revision
- `evaluation`: harness versioning and runtime metadata
- `results.overall`: averaged macro F1 / accuracy / macro precision / macro recall
- `results.by_task`: sentiment/topic/intent/emotion aggregates
- `results.by_dataset`: per-dataset metric blocks
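For orientation, here is an abbreviated sketch of a valid submission file. The field names and values are copied from the Qwen3-Reranker-0.6B run in this repo; `by_task` and `by_dataset` are truncated to a single entry each, whereas a real file must cover all four task families and all 22 datasets:

```json
{
  "schema_version": "1.0",
  "model": {
    "name": "Qwen3-Reranker-0.6B",
    "model_type": "reranker",
    "params": "600M",
    "revision": "unknown",
    "url": "https://huggingface.co/Qwen3-Reranker-0.6B"
  },
  "evaluation": {
    "btzsc_version": "0.1.1",
    "btzsc_commit": "8a0d52cbe423",
    "timestamp": "2026-03-01T19:47:17.282165+00:00",
    "device": "unknown",
    "precision": "unknown",
    "batch_size": 32,
    "max_samples": null
  },
  "results": {
    "overall": {
      "macro_f1": 0.605598,
      "accuracy": 0.642526,
      "macro_precision": 0.65409,
      "macro_recall": 0.669456
    },
    "by_task": {
      "sentiment": {
        "macro_f1": 0.80429,
        "accuracy": 0.798427,
        "macro_precision": 0.841131,
        "macro_recall": 0.838923
      }
    },
    "by_dataset": {
      "imdb": {
        "macro_f1": 0.8839995265007019,
        "accuracy": 0.8840000033378601,
        "macro_precision": 0.8847362399101257,
        "macro_recall": 0.8846499919891357
      }
    }
  }
}
```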
Contributing results
Destination path format:
`results/<model_type>/<model-name>.json`
Recommended flow:
- Export with the official harness (`btzsc evaluate ... --output-json ...`).
- Validate locally (`python validate.py results/<model_type>/<model-name>.json`).
- Add your file at the required path.
- Submit by one of these methods:
  - Web UI upload on Hugging Face (no clone required)
  - Git workflow (direct push if you have write access, otherwise fork + PR)
  - API workflow via `huggingface_hub` with `create_pr=True` (PR-based; see the sketch below)
In short: "add" means placing the JSON at the correct path; "submit" means publishing that change to this remote repo.
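For the API workflow, a minimal sketch using `huggingface_hub` (assuming you are logged in via `huggingface-cli login`; the file name and repo id below are placeholders, not real identifiers):

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up your cached Hugging Face token

# Placeholder paths: point these at your validated result file and at
# this results repository's actual id.
api.upload_file(
    path_or_fileobj="results/reranker/my-model.json",
    path_in_repo="results/reranker/my-model.json",
    repo_id="<org>/<results-repo>",
    repo_type="dataset",
    create_pr=True,  # open a pull request instead of pushing directly
    commit_message="Add BTZSC results for my-model",
)
```

With `create_pr=True` you never need write access: the call uploads the file and opens a pull request for review in one step.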
See SUBMISSION.md for full requirements and review checks.
PRs adding result files are validated in CI with `validate.py`.