| {"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.09663105998356615, "acc5": 0.3479046836483155, "mean_per_class_recall": 0.13591752134261828, "main_metric": 0.13591752134261828}} |
| {"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.3266, "acc5": 0.8792, "mean_per_class_recall": 0.3266, "main_metric": 0.3266}} |
| {"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.0906, "acc5": 0.2969, "mean_per_class_recall": 0.0906, "main_metric": 0.0906}} |
| {"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.14106666666666667, "acc5": 0.6232, "mean_per_class_recall": 0.13959394576745418, "main_metric": 0.14106666666666667}} |
| {"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.17993333333333333, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.17220827808362005, "main_metric": 0.17993333333333333}} |
| {"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.012274881516587679, "acc5": 0.04890995260663507, "mean_per_class_recall": 0.012274881516587677, "main_metric": 0.012274881516587679}} |
| {"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.043617021276595745, "acc5": 0.1803191489361702, "mean_per_class_recall": 0.04361702127659575, "main_metric": 0.043617021276595745}} |
| {"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.15833333333333333, "acc5": 0.4590740740740741, "mean_per_class_recall": 0.14925272562417793, "main_metric": 0.15833333333333333}} |
| {"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.0102010201020102, "acc5": 0.051005100510051006, "mean_per_class_recall": 0.010196078431372548, "main_metric": 0.010196078431372548}} |
| {"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.038376237623762376, "acc5": 0.1367920792079208, "mean_per_class_recall": 0.03837623762376238, "main_metric": 0.038376237623762376}} |
| {"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.04639746634996041, "acc5": 0.2217735550277118, "mean_per_class_recall": 0.047693494714475496, "main_metric": 0.04639746634996041}} |
| {"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.0225, "acc5": 0.07792, "mean_per_class_recall": 0.022520000000000002, "main_metric": 0.0225}} |
| {"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.005541472616872016, "acc5": 0.024150602291261373, "mean_per_class_recall": 0.005550980392156861, "main_metric": 0.005541472616872016}} |
| {"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.0181, "acc5": 0.0707, "mean_per_class_recall": 0.0181, "main_metric": 0.0181}} |
| {"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.016266666666666665, "acc5": 0.07453333333333333, "mean_per_class_recall": 0.021074214693076875, "main_metric": 0.016266666666666665}} |
| {"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.0835, "acc5": 0.2255, "mean_per_class_recall": 0.07282181363528112, "main_metric": 0.0835}} |
| {"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.038933333333333334, "acc5": 0.13026666666666667, "mean_per_class_recall": 0.03506472766366885, "main_metric": 0.038933333333333334}} |
| {"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.360056258790436, "acc5": null, "mean_per_class_recall": 0.2947087940151424, "main_metric": 0.360056258790436}} |
| {"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.0974, "acc5": 0.4743, "mean_per_class_recall": 0.1, "main_metric": 0.0974}} |
| {"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.02794228491439647, "acc5": 0.11144610746204371, "mean_per_class_recall": 0.027999829762070414, "main_metric": 0.02794228491439647}} |
| {"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.017075947308505448, "acc5": 0.0884696698650187, "mean_per_class_recall": 0.027572352156256422, "main_metric": 0.027572352156256422}} |
| {"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.0487871354592532, "acc5": 0.19542109566639412, "mean_per_class_recall": 0.04941075698700097, "main_metric": 0.04941075698700097}} |
| {"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.2609508547008547, "acc5": 0.6758146367521367, "mean_per_class_recall": 0.2522395711208371, "main_metric": 0.2609508547008547}} |
| {"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.540374755859375, "acc5": null, "mean_per_class_recall": 0.5405238717260573, "main_metric": 0.540374755859375}} |
| {"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.5035694673256452, "acc5": null, "mean_per_class_recall": 0.5033636587342945, "main_metric": 0.5035694673256452}} |
| {"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.05253968253968254, "acc5": 0.23666666666666666, "mean_per_class_recall": 0.05416299938347416, "main_metric": 0.05253968253968254}} |
| {"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.0106951871657754, "acc5": 0.05161049620693944, "mean_per_class_recall": 0.010677785693576294, "main_metric": 0.0106951871657754}} |
| {"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.37325, "acc5": 0.911375, "mean_per_class_recall": 0.37324999999999997, "main_metric": 0.37325}} |
| {"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.051409603324935174, "acc5": 0.1607297202861504, "mean_per_class_recall": 0.04099223559673762, "main_metric": 0.051409603324935174}} |
| {"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.07364013521819299, "acc5": 0.4858251382913337, "mean_per_class_recall": 0.10557029090304133, "main_metric": 0.07364013521819299}} |
| {"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.02019999921321869, "text_retrieval_recall@1": 0.03200000151991844, "image_retrieval_recall@5": 0.07020000368356705, "text_retrieval_recall@5": 0.10100000351667404, "image_retrieval_recall@10": 0.10639999806880951, "text_retrieval_recall@10": 0.15299999713897705, "mean_recall@1": 0.026100000366568565, "main_metric": 0.026100000366568565}} |
| {"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.008836464956402779, "text_retrieval_recall@1": 0.012199999764561653, "image_retrieval_recall@5": 0.02946821227669716, "text_retrieval_recall@5": 0.03880000114440918, "image_retrieval_recall@10": 0.051059577614068985, "text_retrieval_recall@10": 0.06800000369548798, "mean_recall@1": 0.010518232360482216, "main_metric": 0.010518232360482216}} |
| {"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.37759245151891796, "jaccard_score_5": 0.44896464646464657, "jaccard_score_6": 0.37355411567074637, "jaccard_score_10": 0.3104851330203443, "jaccard_score_12": 0.2373720516243881, "jaccard_score_5-6": 0.41029900332225916, "jaccard_score_10-12": 0.2738429798148768, "main_metric": 0.2738429798148768}} |
| {"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.007735271435582248, "acc5": 0.16141244654249726, "mean_per_class_recall": 0.0028240008876777682, "acc_avg": 0.007735271472483873, "recall-macro_all": 0.0028240008876777682, "F1-macro_all": 0.0017474144142607705, "main_metric": 0.0017474144142607705}} |
| {"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.501446140099231, "acc5": null, "mean_per_class_recall": 0.501446140099231, "acc_avg": 0.5014461278915405, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.007874015718698502, "count_slide:20": 3810.0, "acc_slide:21": 0.008662695996463299, "count_slide:21": 3694.0, "acc_slide:22": 0.5873786211013794, "count_slide:22": 7210.0, "acc_slide:23": 0.5225037932395935, "count_slide:23": 5288.0, "acc_slide:24": 0.024330271407961845, "count_slide:24": 7727.0, "acc_slide:25": 0.23811721801757812, "count_slide:25": 4334.0, "acc_slide:26": 0.13368283212184906, "count_slide:26": 3815.0, "acc_slide:27": 0.024363476783037186, "count_slide:27": 4556.0, "acc_slide:28": 0.8467594981193542, "count_slide:28": 31878.0, "acc_slide:29": 0.5302150249481201, "count_slide:29": 12742.0, "acc_wg": 0.007874015718698502, "main_metric": 0.501446140099231}} |
| {"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.021847295096797538, "acc5": 0.09824497919305229, "mean_per_class_recall": 0.02469959011076624, "acc_avg": 0.02184729464352131, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.020490005612373352, "count_year:14": 15959.0, "acc_year:15": 0.025369979441165924, "count_year:15": 6149.0, "acc_worst_year": 0.020490005612373352, "acc_region:0": 0.036268386989831924, "count_region:0": 4963.0, "acc_region:1": 0.02031410112977028, "count_region:1": 5858.0, "acc_region:2": 0.013112225569784641, "count_region:2": 2593.0, "acc_region:3": 0.01757228374481201, "count_region:3": 8024.0, "acc_region:4": 0.013513513840734959, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}} |
| {"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.06965458178703968, "acc5": 0.26605766485869253, "mean_per_class_recall": 0.07595125216989615, "acc_top5_avg": 0.26605767011642456, "acc_top5_income_ds:0": 0.1997663527727127, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.2420814484357834, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.2874583899974823, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.3341067433357239, "count_income_ds:3": 862.0, "acc_top5_wg": 0.1997663527727127, "main_metric": 0.1997663527727127}} |
| {"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.20707879564381806, "acc5": 0.5397982062780269, "mean_per_class_recall": 0.20723952712385957, "acc_avg": 0.20707879960536957, "acc_region:0": 0.20083507895469666, "count_region:0": 2395.0, "acc_region:1": 0.20547263324260712, "count_region:1": 2010.0, "acc_region:2": 0.20460960268974304, "count_region:2": 2126.0, "acc_region:3": 0.19619928300380707, "count_region:3": 1947.0, "acc_region:4": 0.2145702838897705, "count_region:4": 1757.0, "acc_region:5": 0.2210386097431183, "count_region:5": 2253.0, "acc_wg": 0.19619928300380707, "main_metric": 0.19619928300380707}} |
| {"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.7394558787345886, "acc_race_race_binary:0": 0.23213429749011993, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.8587213754653931, "count_race_binary:1": 8869.0, "acc_race_wg": 0.23213429749011993, "acc_gender_avg": 0.5330473184585571, "acc_gender_race_binary:0": 0.507434070110321, "acc_gender_race_binary:1": 0.5390686392784119, "acc_gender_wg": 0.507434070110321, "acc_age_avg": 0.08170530945062637, "acc_age_race_binary:0": 0.07673861086368561, "acc_age_race_binary:1": 0.08287292718887329, "acc_age_wg": 0.07673861086368561, "acc_gender_x_avg": 0.5330473184585571, "acc_gender_x_race:0_gender:0": 0.5456821322441101, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.5244385600090027, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.21479500830173492, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.8483904600143433, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.4754316210746765, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.6592398285865784, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.2849937081336975, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.8373494148254395, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.3075030744075775, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.7828282713890076, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.36870747804641724, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.7558823823928833, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.2535392642021179, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.8072444796562195, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.21479500830173492, "toxicity_crime_avg": 0.07787109911441803, "toxicity_crime_race:0": 0.12660668790340424, "count_race:0": 1556.0, 
"toxicity_crime_race:1": 0.05371702462434769, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.08773086965084076, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.05483672395348549, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.07196030020713806, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.08975265175104141, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.06967741996049881, "count_race:6": 1550.0, "toxicity_crime_wg": 0.05371702462434769, "toxicity_nonhuman_avg": 0.1708051860332489, "toxicity_nonhuman_race:0": 0.33933162689208984, "toxicity_nonhuman_race:1": 0.12949639558792114, "toxicity_nonhuman_race:2": 0.21635884046554565, "toxicity_nonhuman_race:3": 0.1392483115196228, "toxicity_nonhuman_race:4": 0.14392060041427612, "toxicity_nonhuman_race:5": 0.13286219537258148, "toxicity_nonhuman_race:6": 0.10129032284021378, "toxicity_nonhuman_wg": 0.10129032284021378, "main_metric": null}} |
| {"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.6031304001808167, "acc_race_race_binary:0": 0.43995633721351624, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.7237836718559265, "count_race_binary:1": 13627.0, "acc_race_wg": 0.43995633721351624, "acc_gender_avg": 0.5101885795593262, "acc_gender_race_binary:0": 0.47102025151252747, "acc_gender_race_binary:1": 0.5391502380371094, "acc_gender_wg": 0.47102025151252747, "acc_age_avg": 0.07720541954040527, "acc_age_race_binary:0": 0.1049027368426323, "acc_age_race_binary:1": 0.05672561749815941, "acc_age_wg": 0.05672561749815941, "acc_gender_x_avg": 0.5101885795593262, "acc_gender_x_race:0_gender:0": 0.33735978603363037, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.8007246255874634, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.04474068805575371, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.9784782528877258, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.15612560510635376, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.9113185405731201, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.07428571581840515, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.9677245616912842, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.08684210479259491, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.9656652212142944, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.04474068805575371, "toxicity_crime_avg": 0.02037716656923294, "toxicity_crime_race:0": 0.06186478212475777, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.007939659059047699, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.021132076159119606, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.007862551137804985, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.007092198356986046, "count_race:4": 1692.0, "toxicity_crime_wg": 
0.007092198356986046, "toxicity_nonhuman_avg": 0.06497067958116531, "toxicity_nonhuman_race:0": 0.14935925602912903, "toxicity_nonhuman_race:1": 0.03691941127181053, "toxicity_nonhuman_race:2": 0.08050314337015152, "toxicity_nonhuman_race:3": 0.03261502459645271, "toxicity_nonhuman_race:4": 0.03546099364757538, "toxicity_nonhuman_wg": 0.03261502459645271, "main_metric": null}} |