OpenCLIP
vaishaal committed on
Commit 535fb30
1 Parent(s): 76a2bf3

Create eval_results.jsonl

Files changed (1)
  1. eval_results.jsonl +40 -0
eval_results.jsonl ADDED
@@ -0,0 +1,40 @@
+ {"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.76236, "acc5": 0.94602, "mean_per_class_recall": 0.7625, "main_metric": 0.76236}}
+ {"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.8491372226787182, "acc5": 0.9479046836483155, "mean_per_class_recall": 0.9428944730514909, "main_metric": 0.9428944730514909}}
+ {"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.9672, "acc5": 0.9996, "mean_per_class_recall": 0.9671999999999998, "main_metric": 0.9672}}
+ {"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.8347, "acc5": 0.9718, "mean_per_class_recall": 0.8347000000000001, "main_metric": 0.8347}}
+ {"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.23233333333333334, "acc5": 0.8664, "mean_per_class_recall": 0.23372915568843933, "main_metric": 0.23233333333333334}}
+ {"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.24526666666666666, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.16711260731376074, "main_metric": 0.24526666666666666}}
+ {"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.19545023696682465, "acc5": 0.415260663507109, "mean_per_class_recall": 0.19545023696682462, "main_metric": 0.19545023696682465}}
+ {"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.575531914893617, "acc5": 0.8914893617021277, "mean_per_class_recall": 0.575531914893617, "main_metric": 0.575531914893617}}
+ {"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.54, "acc5": 0.957962962962963, "mean_per_class_recall": 0.5559610506168461, "main_metric": 0.54}}
+ {"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.24902490249024903, "acc5": 0.5727572757275727, "mean_per_class_recall": 0.24850267379679145, "main_metric": 0.24850267379679145}}
+ {"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.9130297029702971, "acc5": 0.988, "mean_per_class_recall": 0.9130297029702971, "main_metric": 0.9130297029702971}}
+ {"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.46991290577988915, "acc5": 0.7560570071258907, "mean_per_class_recall": 0.4815686220961752, "main_metric": 0.46991290577988915}}
+ {"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.6206842343138989, "acc5": 0.8517754328047319, "mean_per_class_recall": 0.6207807843137254, "main_metric": 0.6206842343138989}}
+ {"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.682, "acc5": 0.9033, "mean_per_class_recall": 0.6822, "main_metric": 0.682}}
+ {"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.48213333333333336, "acc5": 0.7836, "mean_per_class_recall": 0.4535268702732058, "main_metric": 0.48213333333333336}}
+ {"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.493, "acc5": 0.803, "mean_per_class_recall": 0.5078972496629617, "main_metric": 0.493}}
+ {"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.8309666666666666, "acc5": 0.9468333333333333, "mean_per_class_recall": 0.8192261619836506, "main_metric": 0.8309666666666666}}
+ {"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.19268635724331926, "acc5": null, "mean_per_class_recall": 0.28871520578478127, "main_metric": 0.19268635724331926}}
+ {"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.782, "acc5": 0.9572, "mean_per_class_recall": 0.7795341172559064, "main_metric": 0.782}}
+ {"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.6318509744804566, "acc5": 0.8376224830408098, "mean_per_class_recall": 0.6246357049000781, "main_metric": 0.6318509744804566}}
+ {"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.8202959830866807, "acc5": 0.9440559440559441, "mean_per_class_recall": 0.8198953761537138, "main_metric": 0.8198953761537138}}
+ {"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.9375851730716817, "acc5": 0.9980921231943308, "mean_per_class_recall": 0.9369073208277425, "main_metric": 0.9369073208277425}}
+ {"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.788528311965812, "acc5": 0.9667467948717948, "mean_per_class_recall": 0.8475232418475958, "main_metric": 0.788528311965812}}
+ {"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.52154541015625, "acc5": null, "mean_per_class_recall": 0.5213508663557188, "main_metric": 0.52154541015625}}
+ {"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.4865458539264141, "acc5": null, "mean_per_class_recall": 0.48710989520004633, "main_metric": 0.4865458539264141}}
+ {"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.6138095238095238, "acc5": 0.9230158730158731, "mean_per_class_recall": 0.6189966705005917, "main_metric": 0.6138095238095238}}
+ {"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.9073498321104341, "acc5": 0.9967665713219749, "mean_per_class_recall": 0.9076556443395778, "main_metric": 0.9073498321104341}}
+ {"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.97525, "acc5": 1.0, "mean_per_class_recall": 0.97525, "main_metric": 0.97525}}
+ {"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.7141622377107969, "acc5": 0.9500891921216691, "mean_per_class_recall": 0.7102133020117405, "main_metric": 0.7141622377107969}}
+ {"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.5989551321450522, "acc5": 0.9094575906576521, "mean_per_class_recall": 0.5707908766207674, "main_metric": 0.5989551321450522}}
+ {"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.6905999779701233, "text_retrieval_recall@1": 0.8550000190734863, "image_retrieval_recall@5": 0.8981999754905701, "text_retrieval_recall@5": 0.9779999852180481, "image_retrieval_recall@10": 0.9404000043869019, "text_retrieval_recall@10": 0.9860000014305115, "mean_recall@1": 0.7727999985218048, "main_metric": 0.7727999985218048}}
+ {"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.433946430683136, "text_retrieval_recall@1": 0.603600025177002, "image_retrieval_recall@5": 0.6924830079078674, "text_retrieval_recall@5": 0.8307999968528748, "image_retrieval_recall@10": 0.7881647348403931, "text_retrieval_recall@10": 0.8942000269889832, "mean_recall@1": 0.518773227930069, "main_metric": 0.518773227930069}}
+ {"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.5853454151798244, "jaccard_score_5": 0.6058080808080808, "jaccard_score_6": 0.5927045836333094, "jaccard_score_10": 0.5508607198748043, "jaccard_score_12": 0.5326769025367156, "jaccard_score_5-6": 0.5990894549034084, "jaccard_score_10-12": 0.541747518679603, "main_metric": 0.541747518679603}}
+ {"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.338575868757449, "acc5": 0.5861279240961884, "mean_per_class_recall": 0.18157055065011315, "acc_avg": 0.3389965295791626, "recall-macro_all": 0.18157055065011315, "F1-macro_all": 0.15557425888642093, "main_metric": 0.15557425888642093}}
+ {"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.499282808568674, "acc5": null, "mean_per_class_recall": 0.499282808568674, "acc_avg": 0.4992828071117401, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.9926509261131287, "count_slide:20": 3810.0, "acc_slide:21": 0.9967514872550964, "count_slide:21": 3694.0, "acc_slide:22": 0.41359221935272217, "count_slide:22": 7210.0, "acc_slide:23": 0.47787442803382874, "count_slide:23": 5288.0, "acc_slide:24": 0.9756697416305542, "count_slide:24": 7727.0, "acc_slide:25": 0.7625749707221985, "count_slide:25": 4334.0, "acc_slide:26": 0.8697247505187988, "count_slide:26": 3815.0, "acc_slide:27": 0.9523705244064331, "count_slide:27": 4556.0, "acc_slide:28": 0.15493443608283997, "count_slide:28": 31878.0, "acc_slide:29": 0.4750431776046753, "count_slide:29": 12742.0, "acc_wg": 0.15493443608283997, "main_metric": 0.499282808568674}}
+ {"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.20567215487606297, "acc5": 0.4974669802786322, "mean_per_class_recall": 0.2276365257884103, "acc_avg": 0.20567215979099274, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.21473777294158936, "count_year:14": 15959.0, "acc_year:15": 0.182143434882164, "count_year:15": 6149.0, "acc_worst_year": 0.182143434882164, "acc_region:0": 0.18879710137844086, "count_region:0": 4963.0, "acc_region:1": 0.22482076287269592, "count_region:1": 5858.0, "acc_region:2": 0.14114925265312195, "count_region:2": 2593.0, "acc_region:3": 0.2179710865020752, "count_region:3": 8024.0, "acc_region:4": 0.2657657563686371, "count_region:4": 666.0, "acc_region:5": 0.25, "count_region:5": 4.0, "acc_worst_region": 0.14114925265312195, "main_metric": 0.14114925265312195}}
+ {"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.5381101912646303, "acc5": 0.7936054810162718, "mean_per_class_recall": 0.5701024623881303, "acc_top5_avg": 0.7936055064201355, "acc_top5_income_ds:0": 0.625, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.779411792755127, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.8590455055236816, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.907192587852478, "count_income_ds:3": 862.0, "acc_top5_wg": 0.625, "main_metric": 0.625}}
+ {"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.9067104420243434, "acc5": 0.9933536194746957, "mean_per_class_recall": 0.9060406155108897, "acc_avg": 0.9067104458808899, "acc_region:0": 0.8910229802131653, "count_region:0": 2395.0, "acc_region:1": 0.9039800763130188, "count_region:1": 2010.0, "acc_region:2": 0.9073377251625061, "count_region:2": 2126.0, "acc_region:3": 0.902927577495575, "count_region:3": 1947.0, "acc_region:4": 0.9134889245033264, "count_region:4": 1757.0, "acc_region:5": 0.9232134819030762, "count_region:5": 2253.0, "acc_wg": 0.8910229802131653, "main_metric": 0.8910229802131653}}
+ {"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.8166879415512085, "acc_race_race_binary:0": 0.5107913613319397, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.888600766658783, "count_race_binary:1": 8869.0, "acc_race_wg": 0.5107913613319397, "acc_gender_avg": 0.8303815722465515, "acc_gender_race_binary:0": 0.861870527267456, "acc_gender_race_binary:1": 0.8229789137840271, "acc_gender_wg": 0.8229789137840271, "acc_age_avg": 0.2997078597545624, "acc_age_race_binary:0": 0.2757793664932251, "acc_age_race_binary:1": 0.3053331971168518, "acc_age_wg": 0.2757793664932251, "acc_gender_x_avg": 0.8303815722465515, "acc_gender_x_race:0_gender:0": 0.8886107802391052, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.6235138773918152, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.8600713014602661, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.8639667630195618, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.8618857860565186, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.7942332625389099, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.8373265862464905, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.8530120253562927, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.8966789841651917, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.8611111044883728, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.7482993006706238, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.875, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.7271557450294495, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.9184993505477905, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.6235138773918152, "toxicity_crime_avg": 0.1000547781586647, "toxicity_crime_race:0": 0.057197943329811096, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.13333334028720856, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.09168865531682968, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.0831792950630188, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.14143919944763184, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.08339222520589828, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.10709677636623383, "count_race:6": 1550.0, "toxicity_crime_wg": 0.057197943329811096, "toxicity_nonhuman_avg": 0.0009129085228778422, "toxicity_nonhuman_race:0": 0.0038560412358492613, "toxicity_nonhuman_race:1": 0.0, "toxicity_nonhuman_race:2": 0.0006596306338906288, "toxicity_nonhuman_race:3": 0.0, "toxicity_nonhuman_race:4": 0.0, "toxicity_nonhuman_race:5": 0.0014134275261312723, "toxicity_nonhuman_race:6": 0.0006451613153330982, "toxicity_nonhuman_wg": 0.0, "main_metric": null}}
+ {"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.7869467735290527, "acc_race_race_binary:0": 0.7303493618965149, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.8287957906723022, "count_race_binary:1": 13627.0, "acc_race_wg": 0.7303493618965149, "acc_gender_avg": 0.8859637975692749, "acc_gender_race_binary:0": 0.8988686203956604, "acc_gender_race_binary:1": 0.8764218091964722, "acc_gender_wg": 0.8764218091964722, "acc_age_avg": 0.2994135618209839, "acc_age_race_binary:0": 0.330984503030777, "acc_age_race_binary:1": 0.2760695815086365, "acc_age_wg": 0.2760695815086365, "acc_gender_x_avg": 0.8859637975692749, "acc_gender_x_race:0_gender:0": 0.9594478011131287, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.80027174949646, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.9269539713859558, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.8654347658157349, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.924369752407074, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.8716452717781067, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.7904762029647827, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.8725121021270752, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.8776316046714783, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.8948497772216797, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.7904762029647827, "toxicity_crime_avg": 0.01409104373306036, "toxicity_crime_race:0": 0.006186478305608034, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.015978563576936722, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.014591194689273834, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.02009318582713604, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.010638297535479069, "count_race:4": 1692.0, "toxicity_crime_wg": 0.006186478305608034, "toxicity_nonhuman_avg": 0.0007593975169584155, "toxicity_nonhuman_race:0": 0.0015466195764020085, "toxicity_nonhuman_race:1": 0.0006947201327420771, "toxicity_nonhuman_race:2": 0.0, "toxicity_nonhuman_race:3": 0.000873616780154407, "toxicity_nonhuman_race:4": 0.0005910165491513908, "toxicity_nonhuman_wg": 0.0, "main_metric": null}}