{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.8399342645850452, "acc5": 0.9577649958915365, "mean_per_class_recall": 0.9286383859484808, "main_metric": 0.9286383859484808}} {"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.9318, "acc5": 0.9989, "mean_per_class_recall": 0.9318, "main_metric": 0.9318}} {"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.7486, "acc5": 0.9385, "mean_per_class_recall": 0.7485999999999998, "main_metric": 0.7486}} {"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.2856666666666667, "acc5": 0.8654, "mean_per_class_recall": 0.2840506199661849, "main_metric": 0.2856666666666667}} {"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.22526666666666667, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.1590147215937152, "main_metric": 0.22526666666666667}} {"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.13834123222748815, "acc5": 0.32630331753554503, "mean_per_class_recall": 0.13834123222748818, "main_metric": 0.13834123222748815}} {"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.575, "acc5": 0.8734042553191489, "mean_per_class_recall": 0.575, "main_metric": 0.575}} {"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.5016666666666667, "acc5": 0.9498148148148148, "mean_per_class_recall": 0.498954533762556, "main_metric": 0.5016666666666667}} {"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.20192019201920192, "acc5": 0.5055505550555055, "mean_per_class_recall": 0.20156862745098042, "main_metric": 0.20156862745098042}} {"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.8251881188118811, "acc5": 0.9696237623762376, "mean_per_class_recall": 0.8251881188118811, "main_metric": 0.8251881188118811}} {"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.46737925574030087, "acc5": 0.7756136183689628, 
"mean_per_class_recall": 0.40438608489215166, "main_metric": 0.46737925574030087}} {"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.67786, "acc5": 0.91156, "mean_per_class_recall": 0.6778, "main_metric": 0.67786}} {"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.5552280453536128, "acc5": 0.8130833775472106, "mean_per_class_recall": 0.5555388235294118, "main_metric": 0.5552280453536128}} {"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.5994, "acc5": 0.858, "mean_per_class_recall": 0.5997, "main_metric": 0.5994}} {"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.2648, "acc5": 0.5924, "mean_per_class_recall": 0.29336042828497666, "main_metric": 0.2648}} {"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.5385, "acc5": 0.854, "mean_per_class_recall": 0.5482336509809419, "main_metric": 0.5385}} {"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.7864, "acc5": 0.9326666666666666, "mean_per_class_recall": 0.7706028790741525, "main_metric": 0.7864}} {"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.2109704641350211, "acc5": null, "mean_per_class_recall": 0.28476162027492996, "main_metric": 0.2109704641350211}} {"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.6863, "acc5": 0.9596, "mean_per_class_recall": 0.6934818118547428, "main_metric": 0.6863}} {"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.5588995369871864, "acc5": 0.7932593948530203, "mean_per_class_recall": 0.5487802089827323, "main_metric": 0.5588995369871864}} {"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.6817368677833794, "acc5": 0.8503821759635714, "mean_per_class_recall": 0.6733871792468509, "main_metric": 0.6733871792468509}} {"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.8999727446170619, "acc5": 0.9964568002180431, "mean_per_class_recall": 
0.8990456688681937, "main_metric": 0.8990456688681937}} {"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.8053552350427351, "acc5": 0.9772970085470085, "mean_per_class_recall": 0.8511130912707812, "main_metric": 0.8053552350427351}} {"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.665283203125, "acc5": null, "mean_per_class_recall": 0.665230753259014, "main_metric": 0.665283203125}} {"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.5436573311367381, "acc5": null, "mean_per_class_recall": 0.5433656852527358, "main_metric": 0.5436573311367381}} {"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.5703174603174603, "acc5": 0.8861904761904762, "mean_per_class_recall": 0.5756009112658281, "main_metric": 0.5703174603174603}} {"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.7934336525307798, "acc5": 0.9868175600049746, "mean_per_class_recall": 0.7937763653488048, "main_metric": 0.7934336525307798}} {"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.9735, "acc5": 0.99975, "mean_per_class_recall": 0.9734999999999999, "main_metric": 0.9735}} {"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.686025341596631, "acc5": 0.9362690107950052, "mean_per_class_recall": 0.6687787186871892, "main_metric": 0.686025341596631}} {"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.6259987707437, "acc5": 0.9189843269821758, "mean_per_class_recall": 0.6132832616115487, "main_metric": 0.6259987707437}} {"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.676800012588501, "text_retrieval_recall@1": 0.859000027179718, "image_retrieval_recall@5": 0.8884000182151794, "text_retrieval_recall@5": 0.9710000157356262, "image_retrieval_recall@10": 0.9333999752998352, "text_retrieval_recall@10": 0.9879999756813049, "mean_recall@1": 0.7679000198841095, "main_metric": 0.7679000198841095}} {"key": 
"retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.40439823269844055, "text_retrieval_recall@1": 0.5874000191688538, "image_retrieval_recall@5": 0.6603758335113525, "text_retrieval_recall@5": 0.8108000159263611, "image_retrieval_recall@10": 0.7585765719413757, "text_retrieval_recall@10": 0.8817999958992004, "mean_recall@1": 0.49589912593364716, "main_metric": 0.49589912593364716}} {"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.5504430455875866, "jaccard_score_5": 0.5910101010101009, "jaccard_score_6": 0.5532517398608111, "jaccard_score_10": 0.5122065727699531, "jaccard_score_12": 0.4542723631508678, "jaccard_score_5-6": 0.5716500553709856, "jaccard_score_10-12": 0.4831716293074606, "main_metric": 0.4831716293074606}} {"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.24488794372648454, "acc5": 0.40169661844780447, "mean_per_class_recall": 0.1577246427859311, "acc_avg": 0.24633684754371643, "recall-macro_all": 0.1577246427859311, "F1-macro_all": 0.11904940573324031, "main_metric": 0.11904940573324031}} {"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.7356150210454535, "acc5": null, "mean_per_class_recall": 0.7356150210454535, "acc_avg": 0.7356150150299072, "acc_slide:0": null, "count_slide:0": 0.0, "acc_slide:1": null, "count_slide:1": 0.0, "acc_slide:2": null, "count_slide:2": 0.0, "acc_slide:3": null, "count_slide:3": 0.0, "acc_slide:4": null, "count_slide:4": 0.0, "acc_slide:5": null, "count_slide:5": 0.0, "acc_slide:6": null, "count_slide:6": 0.0, "acc_slide:7": null, "count_slide:7": 0.0, "acc_slide:8": null, "count_slide:8": 0.0, "acc_slide:9": null, "count_slide:9": 0.0, "acc_slide:10": null, "count_slide:10": 0.0, "acc_slide:11": null, "count_slide:11": 0.0, "acc_slide:12": null, "count_slide:12": 0.0, "acc_slide:13": null, "count_slide:13": 0.0, "acc_slide:14": null, "count_slide:14": 0.0, "acc_slide:15": null, 
"count_slide:15": 0.0, "acc_slide:16": null, "count_slide:16": 0.0, "acc_slide:17": null, "count_slide:17": 0.0, "acc_slide:18": null, "count_slide:18": 0.0, "acc_slide:19": null, "count_slide:19": 0.0, "acc_slide:20": 0.8380577564239502, "count_slide:20": 3810.0, "acc_slide:21": 0.8194369077682495, "count_slide:21": 3694.0, "acc_slide:22": 0.6486823558807373, "count_slide:22": 7210.0, "acc_slide:23": 0.5374432802200317, "count_slide:23": 5288.0, "acc_slide:24": 0.8500064611434937, "count_slide:24": 7727.0, "acc_slide:25": 0.8174896240234375, "count_slide:25": 4334.0, "acc_slide:26": 0.707732617855072, "count_slide:26": 3815.0, "acc_slide:27": 0.728050947189331, "count_slide:27": 4556.0, "acc_slide:28": 0.7776523232460022, "count_slide:28": 31878.0, "acc_slide:29": 0.620781660079956, "count_slide:29": 12742.0, "acc_wg": 0.5374432802200317, "main_metric": 0.7356150210454535}} {"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.1521168807671431, "acc5": 0.4212049936674507, "mean_per_class_recall": 0.16142921738995406, "acc_avg": 0.15211687982082367, "acc_year:0": null, "count_year:0": 0.0, "acc_year:1": null, "count_year:1": 0.0, "acc_year:2": null, "count_year:2": 0.0, "acc_year:3": null, "count_year:3": 0.0, "acc_year:4": null, "count_year:4": 0.0, "acc_year:5": null, "count_year:5": 0.0, "acc_year:6": null, "count_year:6": 0.0, "acc_year:7": null, "count_year:7": 0.0, "acc_year:8": null, "count_year:8": 0.0, "acc_year:9": null, "count_year:9": 0.0, "acc_year:10": null, "count_year:10": 0.0, "acc_year:11": null, "count_year:11": 0.0, "acc_year:12": null, "count_year:12": 0.0, "acc_year:13": null, "count_year:13": 0.0, "acc_year:14": 0.15928316116333008, "count_year:14": 15959.0, "acc_year:15": 0.13351763784885406, "count_year:15": 6149.0, "acc_worst_year": 0.13351763784885406, "acc_region:0": 0.12855127453804016, "count_region:0": 4963.0, "acc_region:1": 0.16353704035282135, "count_region:1": 5858.0, "acc_region:2": 0.09294253587722778, "count_region:2": 2593.0, 
"acc_region:3": 0.17298105359077454, "count_region:3": 8024.0, "acc_region:4": 0.2057057023048401, "count_region:4": 666.0, "acc_region:5": 0.25, "count_region:5": 4.0, "acc_worst_region": 0.09294253587722778, "main_metric": 0.09294253587722778}} {"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.4958606908364259, "acc5": 0.7636311732800457, "mean_per_class_recall": 0.5285435507457845, "acc_top5_avg": 0.7636311650276184, "acc_top5_income_ds:0": 0.5829439163208008, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.7477375268936157, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.8312985301017761, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.8886311054229736, "count_income_ds:3": 862.0, "acc_top5_wg": 0.5829439163208008, "main_metric": 0.5829439163208008}} {"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.8833279948750801, "acc5": 0.9894298526585522, "mean_per_class_recall": 0.8830870867516506, "acc_avg": 0.8833280205726624, "acc_region:0": 0.8634655475616455, "count_region:0": 2395.0, "acc_region:1": 0.886069655418396, "count_region:1": 2010.0, "acc_region:2": 0.8772342205047607, "count_region:2": 2126.0, "acc_region:3": 0.8793014883995056, "count_region:3": 1947.0, "acc_region:4": 0.8918611407279968, "count_region:4": 1757.0, "acc_region:5": 0.9045716524124146, "count_region:5": 2253.0, "acc_wg": 0.8634655475616455, "main_metric": 0.8634655475616455}} {"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.8857951164245605, "acc_race_race_binary:0": 0.5045563578605652, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.9754199981689453, "count_race_binary:1": 8869.0, "acc_race_wg": 0.5045563578605652, "acc_gender_avg": 0.9058791399002075, "acc_gender_race_binary:0": 0.932374119758606, "acc_gender_race_binary:1": 0.8996504545211792, "acc_gender_wg": 0.8996504545211792, "acc_age_avg": 0.43983933329582214, "acc_age_race_binary:0": 0.4383693039417267, 
"acc_age_race_binary:1": 0.44018492102622986, "acc_age_wg": 0.4383693039417267, "acc_gender_x_avg": 0.9058791399002075, "acc_gender_x_race:0_gender:0": 0.7759699821472168, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.9194187521934509, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.9055258631706238, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.9636552333831787, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.8711819648742676, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.9397116899490356, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.8953341841697693, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.9614458084106445, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.9495695233345032, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.9595959782600403, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.7782312631607056, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.9823529124259949, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.8185328245162964, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.9754204154014587, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.7759699821472168, "toxicity_crime_avg": 0.07230235636234283, "toxicity_crime_race:0": 0.037917736917734146, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.1366906464099884, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.05343008041381836, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.07886629551649094, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.10669974982738495, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.031095406040549278, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.04258064553141594, "count_race:6": 1550.0, "toxicity_crime_wg": 0.031095406040549278, "toxicity_nonhuman_avg": 0.0002738725452218205, "toxicity_nonhuman_race:0": 
0.0006426735199056566, "toxicity_nonhuman_race:1": 0.00047961631207726896, "toxicity_nonhuman_race:2": 0.0, "toxicity_nonhuman_race:3": 0.0, "toxicity_nonhuman_race:4": 0.0, "toxicity_nonhuman_race:5": 0.0, "toxicity_nonhuman_race:6": 0.0006451613153330982, "toxicity_nonhuman_wg": 0.0, "main_metric": null}} {"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.8549550771713257, "acc_race_race_binary:0": 0.7018657922744751, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.9681514501571655, "count_race_binary:1": 13627.0, "acc_race_wg": 0.7018657922744751, "acc_gender_avg": 0.9391638040542603, "acc_gender_race_binary:0": 0.9478960037231445, "acc_gender_race_binary:1": 0.9327071309089661, "acc_gender_wg": 0.9327071309089661, "acc_age_avg": 0.4597730338573456, "acc_age_race_binary:0": 0.4217943549156189, "acc_age_race_binary:1": 0.4878549873828888, "acc_age_wg": 0.4217943549156189, "acc_gender_x_avg": 0.9391638040542603, "acc_gender_x_race:0_gender:0": 0.9594478011131287, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.9556159377098083, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.9293279647827148, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.9700000286102295, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.9287925958633423, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.9568261504173279, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.8323809504508972, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.9580419659614563, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.8631578683853149, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.9527897238731384, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.8323809504508972, "toxicity_crime_avg": 0.055225078016519547, "toxicity_crime_race:0": 0.03247901052236557, "count_race:0": 4526.0, "toxicity_crime_race:1": 
0.06917427480220795, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.05559748411178589, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.026499709114432335, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.0904255285859108, "count_race:4": 1692.0, "toxicity_crime_wg": 0.026499709114432335, "toxicity_nonhuman_avg": 0.000506265030708164, "toxicity_nonhuman_race:0": 0.00022094564337749034, "toxicity_nonhuman_race:1": 0.0009924573823809624, "toxicity_nonhuman_race:2": 0.0, "toxicity_nonhuman_race:3": 0.0, "toxicity_nonhuman_race:4": 0.0005910165491513908, "toxicity_nonhuman_wg": 0.0, "main_metric": null}}