clip-ye-pop-alt_txt / eval_results.jsonl
{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.034346754313886604, "acc5": 0.10254724732949877, "mean_per_class_recall": 0.018016749917221252, "main_metric": 0.018016749917221252}}
{"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.1575, "acc5": 0.6409, "mean_per_class_recall": 0.1575, "main_metric": 0.1575}}
{"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.022, "acc5": 0.0905, "mean_per_class_recall": 0.022000000000000002, "main_metric": 0.022}}
{"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.12873333333333334, "acc5": 0.6212, "mean_per_class_recall": 0.12515604878264755, "main_metric": 0.12873333333333334}}
{"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.21726666666666666, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.19131707603718692, "main_metric": 0.21726666666666666}}
{"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.005023696682464455, "acc5": 0.025639810426540285, "mean_per_class_recall": 0.005023696682464454, "main_metric": 0.005023696682464455}}
{"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.015425531914893617, "acc5": 0.08191489361702127, "mean_per_class_recall": 0.015425531914893618, "main_metric": 0.015425531914893617}}
{"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.15685185185185185, "acc5": 0.5888888888888889, "mean_per_class_recall": 0.14465851192289328, "main_metric": 0.15685185185185185}}
{"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.012001200120012, "acc5": 0.062106210621062106, "mean_per_class_recall": 0.012049910873440284, "main_metric": 0.012049910873440284}}
{"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.009306930693069307, "acc5": 0.05425742574257426, "mean_per_class_recall": 0.009306930693069308, "main_metric": 0.009306930693069307}}
{"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.02842438638163104, "acc5": 0.1543151227236738, "mean_per_class_recall": 0.03895620856692646, "main_metric": 0.02842438638163104}}
{"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.00132, "acc5": 0.00818, "mean_per_class_recall": 0.00132, "main_metric": 0.00132}}
{"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.0013362416239265853, "acc5": 0.006189942816718741, "mean_per_class_recall": 0.0013341176470588233, "main_metric": 0.0013362416239265853}}
{"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.0009, "acc5": 0.0074, "mean_per_class_recall": 0.0008999999999999999, "main_metric": 0.0009}}
{"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.006266666666666667, "acc5": 0.029733333333333334, "mean_per_class_recall": 0.006440631006436706, "main_metric": 0.006266666666666667}}
{"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.0125, "acc5": 0.043, "mean_per_class_recall": 0.012264069264069265, "main_metric": 0.0125}}
{"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.007566666666666667, "acc5": 0.029833333333333333, "mean_per_class_recall": 0.006664136354912364, "main_metric": 0.007566666666666667}}
{"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.2180028129395218, "acc5": null, "mean_per_class_recall": 0.1727439350623538, "main_metric": 0.2180028129395218}}
{"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.0595, "acc5": 0.4926, "mean_per_class_recall": 0.058616478387926906, "main_metric": 0.0595}}
{"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.007645095294497685, "acc5": 0.0479164423387531, "mean_per_class_recall": 0.008899011200294864, "main_metric": 0.007645095294497685}}
{"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.0047162140185396, "acc5": 0.04846316474223451, "mean_per_class_recall": 0.008531222565544843, "main_metric": 0.008531222565544843}}
{"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.02616516762060507, "acc5": 0.14309076042518398, "mean_per_class_recall": 0.026354791245658513, "main_metric": 0.026354791245658513}}
{"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.03579059829059829, "acc5": 0.33066239316239315, "mean_per_class_recall": 0.05688039312364575, "main_metric": 0.03579059829059829}}
{"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.50054931640625, "acc5": null, "mean_per_class_recall": 0.5007625875523939, "main_metric": 0.50054931640625}}
{"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.500823723228995, "acc5": null, "mean_per_class_recall": 0.5, "main_metric": 0.500823723228995}}
{"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.041746031746031746, "acc5": 0.17857142857142858, "mean_per_class_recall": 0.04245896679742845, "main_metric": 0.041746031746031746}}
{"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.0053475935828877, "acc5": 0.02984703395100112, "mean_per_class_recall": 0.005252985376067912, "main_metric": 0.0053475935828877}}
{"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.1705, "acc5": 0.65925, "mean_per_class_recall": 0.1705, "main_metric": 0.1705}}
{"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.0072549055666918, "acc5": 0.0287345752799897, "mean_per_class_recall": 0.006050044293568109, "main_metric": 0.0072549055666918}}
{"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.10694529809465274, "acc5": 0.5476720958819914, "mean_per_class_recall": 0.10143455973215876, "main_metric": 0.10694529809465274}}
{"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.0017999999690800905, "text_retrieval_recall@1": 0.004000000189989805, "image_retrieval_recall@5": 0.008799999952316284, "text_retrieval_recall@5": 0.00800000037997961, "image_retrieval_recall@10": 0.01640000008046627, "text_retrieval_recall@10": 0.014000000432133675, "mean_recall@1": 0.002900000079534948, "main_metric": 0.002900000079534948}}
{"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.00043982407078146935, "text_retrieval_recall@1": 0.00019999999494757503, "image_retrieval_recall@5": 0.0018792483024299145, "text_retrieval_recall@5": 0.00139999995008111, "image_retrieval_recall@10": 0.003678528591990471, "text_retrieval_recall@10": 0.0031999999191612005, "mean_recall@1": 0.0003199120328645222, "main_metric": 0.0003199120328645222}}
{"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.30502118332598266, "jaccard_score_5": 0.37416666666666665, "jaccard_score_6": 0.3183585313174946, "jaccard_score_10": 0.17570981444220882, "jaccard_score_12": 0.17719181130396083, "jaccard_score_5-6": 0.34555186415651534, "jaccard_score_10-12": 0.1764525482324077, "main_metric": 0.1764525482324077}}
{"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.0005374962024724825, "acc5": 0.06891636091701527, "mean_per_class_recall": 0.0014487373135552357, "acc_avg": 0.0005374961765483022, "recall-macro_all": 0.0014487373135552357, "F1-macro_all": 0.0004293862616962524, "main_metric": 0.0004293862616962524}}
{"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.5007054341947469, "acc5": null, "mean_per_class_recall": 0.5007054341947469, "acc_avg": 0.5007054209709167, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.010236220434308052, "count_slide:20": 3810.0, "acc_slide:21": 0.002707092557102442, "count_slide:21": 3694.0, "acc_slide:22": 0.5866851806640625, "count_slide:22": 7210.0, "acc_slide:23": 0.5221255421638489, "count_slide:23": 5288.0, "acc_slide:24": 0.02756567858159542, "count_slide:24": 7727.0, "acc_slide:25": 0.23742501437664032, "count_slide:25": 4334.0, "acc_slide:26": 0.12975098192691803, "count_slide:26": 3815.0, "acc_slide:27": 0.024143986403942108, "count_slide:27": 4556.0, "acc_slide:28": 0.8470418453216553, "count_slide:28": 31878.0, "acc_slide:29": 0.5256631374359131, "count_slide:29": 12742.0, "acc_wg": 0.002707092557102442, "main_metric": 0.5007054341947469}}
{"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.02044508775104035, "acc5": 0.08110186357879501, "mean_per_class_recall": 0.015539600980985533, "acc_avg": 0.020445087924599648, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.01936211623251438, "count_year:14": 15959.0, "acc_year:15": 0.023255813866853714, "count_year:15": 6149.0, "acc_worst_year": 0.01936211623251438, "acc_region:0": 0.03949224203824997, "count_region:0": 4963.0, "acc_region:1": 0.012973710894584656, "count_region:1": 5858.0, "acc_region:2": 0.013497878797352314, "count_region:2": 2593.0, "acc_region:3": 0.017198404297232628, "count_region:3": 8024.0, "acc_region:4": 0.010510510765016079, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}}
{"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.01598629745932058, "acc5": 0.07993148729660292, "mean_per_class_recall": 0.026066839478747942, "acc_top5_avg": 0.07993149012327194, "acc_top5_income_ds:0": 0.06892523169517517, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.0882352963089943, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.08324084430932999, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.07888630777597427, "count_income_ds:3": 862.0, "acc_top5_wg": 0.06892523169517517, "main_metric": 0.06892523169517517}}
{"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.032591287636130686, "acc5": 0.15342729019859064, "mean_per_class_recall": 0.030989073874274915, "acc_avg": 0.032591287046670914, "acc_region:0": 0.0329853855073452, "count_region:0": 2395.0, "acc_region:1": 0.030845770612359047, "count_region:1": 2010.0, "acc_region:2": 0.036218252032995224, "count_region:2": 2126.0, "acc_region:3": 0.02978941984474659, "count_region:3": 1947.0, "acc_region:4": 0.03585657477378845, "count_region:4": 1757.0, "acc_region:5": 0.03018197976052761, "count_region:5": 2253.0, "acc_wg": 0.02978941984474659, "main_metric": 0.02978941984474659}}
{"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.7872923016548157, "acc_race_race_binary:0": 0.11558753252029419, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.9452024102210999, "count_race_binary:1": 8869.0, "acc_race_wg": 0.11558753252029419, "acc_gender_avg": 0.49059703946113586, "acc_gender_race_binary:0": 0.47577938437461853, "acc_gender_race_binary:1": 0.494080513715744, "acc_gender_wg": 0.47577938437461853, "acc_age_avg": 0.1650538593530655, "acc_age_race_binary:0": 0.17026378214359283, "acc_age_race_binary:1": 0.16382907330989838, "acc_age_wg": 0.16382907330989838, "acc_gender_x_avg": 0.49059703946113586, "acc_gender_x_race:0_gender:0": 0.5231539607048035, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.45970937609672546, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.34224599599838257, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.6313603520393372, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.4926958978176117, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.4980340898036957, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.41109710931777954, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.6060240864753723, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.41943418979644775, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.5959596037864685, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.4721088409423828, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.533823549747467, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.38352638483047485, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.5834411382675171, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.34224599599838257, "toxicity_crime_avg": 0.0001825817016651854, "toxicity_crime_race:0": 0.0, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.00047961631207726896, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.0, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.0006161429337225854, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.0, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.0, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.0, "count_race:6": 1550.0, "toxicity_crime_wg": 0.0, "toxicity_nonhuman_avg": 0.2889355421066284, "toxicity_nonhuman_race:0": 0.16516709327697754, "toxicity_nonhuman_race:1": 0.3563549220561981, "toxicity_nonhuman_race:2": 0.230211079120636, "toxicity_nonhuman_race:3": 0.3345656096935272, "toxicity_nonhuman_race:4": 0.28784120082855225, "toxicity_nonhuman_race:5": 0.30600705742836, "toxicity_nonhuman_race:6": 0.3174193501472473, "toxicity_nonhuman_wg": 0.16516709327697754, "main_metric": null}}
{"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.5770999193191528, "acc_race_race_binary:0": 0.18052798509597778, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.8703309893608093, "count_race_binary:1": 13627.0, "acc_race_wg": 0.18052798509597778, "acc_gender_avg": 0.49740538001060486, "acc_gender_race_binary:0": 0.48104405403137207, "acc_gender_race_binary:1": 0.509503185749054, "acc_gender_wg": 0.48104405403137207, "acc_age_avg": 0.1819600909948349, "acc_age_race_binary:0": 0.16405320167541504, "acc_age_race_binary:1": 0.19520071148872375, "acc_age_wg": 0.16405320167541504, "acc_gender_x_avg": 0.49740538001060486, "acc_gender_x_race:0_gender:0": 0.3826574683189392, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.6209239363670349, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.17768444120883942, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.8421739339828491, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.21141088008880615, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.7887981534004211, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.24507936835289001, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.8585261106491089, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.16184210777282715, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.8047210574150085, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.16184210777282715, "toxicity_crime_avg": 4.2188752559013665e-05, "toxicity_crime_race:0": 0.00022094564337749034, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.0, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.0, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.0, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.0, "count_race:4": 1692.0, "toxicity_crime_wg": 0.0, "toxicity_nonhuman_avg": 0.49449437856674194, "toxicity_nonhuman_race:0": 0.3221387565135956, "toxicity_nonhuman_race:1": 0.5597459077835083, "toxicity_nonhuman_race:2": 0.5391194820404053, "toxicity_nonhuman_race:3": 0.47612112760543823, "toxicity_nonhuman_race:4": 0.4994089901447296, "toxicity_nonhuman_wg": 0.3221387565135956, "main_metric": null}}