Muennighoff committed on
Commit
154c4e9
1 Parent(s): 1e82af0

Add MTEB evaluation

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. {eval → evaluation/beir}/beir.json +0 -0
  2. evaluation/mteb/AmazonCounterfactualClassification.json +82 -0
  3. evaluation/mteb/AmazonPolarityClassification.json +14 -0
  4. evaluation/mteb/AmazonReviewsClassification.json +94 -0
  5. evaluation/mteb/ArguAna.json +37 -0
  6. evaluation/mteb/ArxivClusteringP2P.json +9 -0
  7. evaluation/mteb/ArxivClusteringS2S.json +9 -0
  8. evaluation/mteb/AskUbuntuDupQuestions.json +9 -0
  9. evaluation/mteb/BIOSSES.json +19 -0
  10. evaluation/mteb/Banking77Classification.json +12 -0
  11. evaluation/mteb/BiorxivClusteringP2P.json +9 -0
  12. evaluation/mteb/BiorxivClusteringS2S.json +9 -0
  13. evaluation/mteb/CQADupstackAndroidRetrieval.json +37 -0
  14. evaluation/mteb/CQADupstackEnglishRetrieval.json +37 -0
  15. evaluation/mteb/CQADupstackGamingRetrieval.json +37 -0
  16. evaluation/mteb/CQADupstackGisRetrieval.json +37 -0
  17. evaluation/mteb/CQADupstackMathematicaRetrieval.json +37 -0
  18. evaluation/mteb/CQADupstackPhysicsRetrieval.json +37 -0
  19. evaluation/mteb/CQADupstackProgrammersRetrieval.json +37 -0
  20. evaluation/mteb/CQADupstackRetrieval.json +1 -0
  21. evaluation/mteb/CQADupstackStatsRetrieval.json +37 -0
  22. evaluation/mteb/CQADupstackTexRetrieval.json +37 -0
  23. evaluation/mteb/CQADupstackUnixRetrieval.json +37 -0
  24. evaluation/mteb/CQADupstackWebmastersRetrieval.json +37 -0
  25. evaluation/mteb/CQADupstackWordpressRetrieval.json +37 -0
  26. evaluation/mteb/ClimateFEVER.json +37 -0
  27. evaluation/mteb/DBPedia.json +37 -0
  28. evaluation/mteb/EmotionClassification.json +20 -0
  29. evaluation/mteb/FEVER.json +37 -0
  30. evaluation/mteb/FiQA2018.json +37 -0
  31. evaluation/mteb/HotpotQA.json +37 -0
  32. evaluation/mteb/ImdbClassification.json +14 -0
  33. evaluation/mteb/MSMARCO.json +37 -0
  34. evaluation/mteb/MTOPDomainClassification.json +94 -0
  35. evaluation/mteb/MTOPIntentClassification.json +94 -0
  36. evaluation/mteb/MassiveIntentClassification.json +724 -0
  37. evaluation/mteb/MassiveScenarioClassification.json +724 -0
  38. evaluation/mteb/MedrxivClusteringP2P.json +9 -0
  39. evaluation/mteb/MedrxivClusteringS2S.json +9 -0
  40. evaluation/mteb/MindSmallReranking.json +9 -0
  41. evaluation/mteb/NFCorpus.json +37 -0
  42. evaluation/mteb/NQ.json +37 -0
  43. evaluation/mteb/QuoraRetrieval.json +37 -0
  44. evaluation/mteb/RedditClustering.json +14 -0
  45. evaluation/mteb/RedditClusteringP2P.json +9 -0
  46. evaluation/mteb/SCIDOCS.json +37 -0
  47. evaluation/mteb/SGPT-125M-weightedmean-msmarco-specb-bitfit_results.csv +210 -0
  48. evaluation/mteb/SICK-R.json +17 -0
  49. evaluation/mteb/STS12.json +19 -0
  50. evaluation/mteb/STS13.json +19 -0
{eval → evaluation/beir}/beir.json RENAMED
File without changes
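The files added below are the per-task result JSONs that the MTEB harness writes for the SGPT-125M-weightedmean-msmarco-specb-bitfit model (the accompanying results CSV aggregates them). As a minimal sketch, such files are typically produced with the mteb package roughly as follows; the task list and output folder here are illustrative, and the real SGPT encoder uses weighted-mean pooling and special bracket tokens rather than a plain SentenceTransformer load.

# Minimal sketch (not the exact script behind this commit): run MTEB tasks with a
# SentenceTransformer-compatible encoder and write one <TaskName>.json per task.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Illustrative model id; the SGPT bitfit checkpoints normally go through the
# official SGPT wrapper to get weighted-mean pooling and bracket tokens.
model = SentenceTransformer("Muennighoff/SGPT-125M-weightedmean-msmarco-specb-bitfit")

evaluation = MTEB(tasks=["Banking77Classification", "ArguAna"])
evaluation.run(model, output_folder="evaluation/mteb")  # emits JSON files like the ones below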
evaluation/mteb/AmazonCounterfactualClassification.json ADDED
@@ -0,0 +1,82 @@
1
+ {
2
+ "test": {
3
+ "de": {
4
+ "accuracy": 0.5688436830835117,
5
+ "accuracy_stderr": 0.029093897773991517,
6
+ "ap": 0.7267279104379771,
7
+ "ap_stderr": 0.010217580709985108,
8
+ "f1": 0.5444984024378641,
9
+ "f1_stderr": 0.02389292222743725,
10
+ "main_score": 0.7267279104379771
11
+ },
12
+ "en": {
13
+ "accuracy": 0.6123880597014926,
14
+ "accuracy_stderr": 0.031379155023926214,
15
+ "ap": 0.25854431650388643,
16
+ "ap_stderr": 0.016981404964613887,
17
+ "f1": 0.557518627628186,
18
+ "f1_stderr": 0.024487163893960967,
19
+ "main_score": 0.25854431650388643
20
+ },
21
+ "en-ext": {
22
+ "accuracy": 0.5827586206896551,
23
+ "accuracy_stderr": 0.048062843086995724,
24
+ "ap": 0.14067357642500386,
25
+ "ap_stderr": 0.013693676529546861,
26
+ "f1": 0.4817231851869133,
27
+ "f1_stderr": 0.030208933707168584,
28
+ "main_score": 0.14067357642500386
29
+ },
30
+ "evaluation_time": 52.9,
31
+ "ja": {
32
+ "accuracy": 0.5464668094218414,
33
+ "accuracy_stderr": 0.04850756984311426,
34
+ "ap": 0.11776694555054965,
35
+ "ap_stderr": 0.006424556406403228,
36
+ "f1": 0.44526622834078766,
37
+ "f1_stderr": 0.029061809459351582,
38
+ "main_score": 0.11776694555054965
39
+ }
40
+ },
41
+ "validation": {
42
+ "de": {
43
+ "accuracy": 0.5918454935622317,
44
+ "accuracy_stderr": 0.024350314081552426,
45
+ "ap": 0.7372017677238578,
46
+ "ap_stderr": 0.010683816932118138,
47
+ "f1": 0.5642882005866563,
48
+ "f1_stderr": 0.02007457192846663,
49
+ "main_score": 0.7372017677238578
50
+ },
51
+ "en": {
52
+ "accuracy": 0.6113432835820894,
53
+ "accuracy_stderr": 0.038501677223608914,
54
+ "ap": 0.2227104848944061,
55
+ "ap_stderr": 0.01397442904182502,
56
+ "f1": 0.5393378654542855,
57
+ "f1_stderr": 0.027447873327993155,
58
+ "main_score": 0.2227104848944061
59
+ },
60
+ "en-ext": {
61
+ "accuracy": 0.5848348348348348,
62
+ "accuracy_stderr": 0.043238290090035825,
63
+ "ap": 0.13520924912279636,
64
+ "ap_stderr": 0.011096096922529254,
65
+ "f1": 0.4794399892152111,
66
+ "f1_stderr": 0.02584441348504658,
67
+ "main_score": 0.13520924912279636
68
+ },
69
+ "evaluation_time": 43.78,
70
+ "ja": {
71
+ "accuracy": 0.5523605150214592,
72
+ "accuracy_stderr": 0.04902106769175921,
73
+ "ap": 0.11348392156508444,
74
+ "ap_stderr": 0.012428212574320868,
75
+ "f1": 0.4441089123442944,
76
+ "f1_stderr": 0.03136061225772234,
77
+ "main_score": 0.11348392156508444
78
+ }
79
+ },
80
+ "dataset_version": null,
81
+ "mteb_version": "0.0.2"
82
+ }
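A small sketch of reading one of these result files back, assuming the layout shown above (per-split dicts keyed by language, plus scalar fields such as "evaluation_time"); the path is the file added in this commit.

import json

# Minimal sketch: print the per-language main_score values from the file above.
with open("evaluation/mteb/AmazonCounterfactualClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    for key, value in results[split].items():
        if isinstance(value, dict):  # skip scalar entries such as "evaluation_time"
            print(split, key, value["main_score"])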
evaluation/mteb/AmazonPolarityClassification.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "test": {
3
+ "accuracy": 0.65401225,
4
+ "accuracy_stderr": 0.03587243183229288,
5
+ "ap": 0.6022809958678552,
6
+ "ap_stderr": 0.028477219496347024,
7
+ "evaluation_time": 12198.12,
8
+ "f1": 0.650251824898292,
9
+ "f1_stderr": 0.037356425365799015,
10
+ "main_score": 0.65401225
11
+ },
12
+ "dataset_version": null,
13
+ "mteb_version": "0.0.2"
14
+ }
evaluation/mteb/AmazonReviewsClassification.json ADDED
@@ -0,0 +1,94 @@
1
+ {
2
+ "test": {
3
+ "de": {
4
+ "accuracy": 0.2479,
5
+ "accuracy_stderr": 0.01405709785126361,
6
+ "f1": 0.245833598854121,
7
+ "f1_stderr": 0.015461604594068135,
8
+ "main_score": 0.2479
9
+ },
10
+ "en": {
11
+ "accuracy": 0.31165999999999994,
12
+ "accuracy_stderr": 0.018214291092436177,
13
+ "f1": 0.30908870050167436,
14
+ "f1_stderr": 0.01830210891332752,
15
+ "main_score": 0.31165999999999994
16
+ },
17
+ "es": {
18
+ "accuracy": 0.26643999999999995,
19
+ "accuracy_stderr": 0.011960869533608332,
20
+ "f1": 0.2639012792213563,
21
+ "f1_stderr": 0.013800540988456476,
22
+ "main_score": 0.26643999999999995
23
+ },
24
+ "evaluation_time": 735.82,
25
+ "fr": {
26
+ "accuracy": 0.26386000000000004,
27
+ "accuracy_stderr": 0.017606828220892035,
28
+ "f1": 0.2627686779145487,
29
+ "f1_stderr": 0.01750120871837706,
30
+ "main_score": 0.26386000000000004
31
+ },
32
+ "ja": {
33
+ "accuracy": 0.22078000000000003,
34
+ "accuracy_stderr": 0.011609117106825998,
35
+ "f1": 0.21797960290226842,
36
+ "f1_stderr": 0.011959444775888735,
37
+ "main_score": 0.22078000000000003
38
+ },
39
+ "zh": {
40
+ "accuracy": 0.24274,
41
+ "accuracy_stderr": 0.007931733732293337,
42
+ "f1": 0.23887054434822627,
43
+ "f1_stderr": 0.008252486720387382,
44
+ "main_score": 0.24274
45
+ }
46
+ },
47
+ "validation": {
48
+ "de": {
49
+ "accuracy": 0.25168,
50
+ "accuracy_stderr": 0.015328457195686715,
51
+ "f1": 0.2502088133771333,
52
+ "f1_stderr": 0.01650287553492419,
53
+ "main_score": 0.25168
54
+ },
55
+ "en": {
56
+ "accuracy": 0.32438,
57
+ "accuracy_stderr": 0.022893920590410018,
58
+ "f1": 0.3212565428623893,
59
+ "f1_stderr": 0.021985479345985583,
60
+ "main_score": 0.32438
61
+ },
62
+ "es": {
63
+ "accuracy": 0.26704,
64
+ "accuracy_stderr": 0.012879068289282413,
65
+ "f1": 0.2641578465769073,
66
+ "f1_stderr": 0.01402375418117518,
67
+ "main_score": 0.26704
68
+ },
69
+ "evaluation_time": 740.71,
70
+ "fr": {
71
+ "accuracy": 0.26348,
72
+ "accuracy_stderr": 0.017907584985139675,
73
+ "f1": 0.26220101963133713,
74
+ "f1_stderr": 0.01807845581838548,
75
+ "main_score": 0.26348
76
+ },
77
+ "ja": {
78
+ "accuracy": 0.21562,
79
+ "accuracy_stderr": 0.011403666077187636,
80
+ "f1": 0.21286585397745714,
81
+ "f1_stderr": 0.011890703738952032,
82
+ "main_score": 0.21562
83
+ },
84
+ "zh": {
85
+ "accuracy": 0.23962,
86
+ "accuracy_stderr": 0.012780125195004927,
87
+ "f1": 0.23540552974934634,
88
+ "f1_stderr": 0.012407243106519977,
89
+ "main_score": 0.23962
90
+ }
91
+ },
92
+ "dataset_version": null,
93
+ "mteb_version": "0.0.2"
94
+ }
evaluation/mteb/ArguAna.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 76.89,
6
+ "map_at_1": 0.22404,
7
+ "map_at_10": 0.36845,
8
+ "map_at_100": 0.37945,
9
+ "map_at_1000": 0.37966,
10
+ "map_at_3": 0.3178,
11
+ "map_at_5": 0.34608,
12
+ "mrr_at_1": 0.22902,
13
+ "mrr_at_10": 0.37034,
14
+ "mrr_at_100": 0.38134,
15
+ "mrr_at_1000": 0.38155,
16
+ "mrr_at_3": 0.31935,
17
+ "mrr_at_5": 0.34812,
18
+ "ndcg_at_1": 0.22404,
19
+ "ndcg_at_10": 0.45425,
20
+ "ndcg_at_100": 0.50354,
21
+ "ndcg_at_1000": 0.50874,
22
+ "ndcg_at_3": 0.3497,
23
+ "ndcg_at_5": 0.40081,
24
+ "precision_at_1": 0.22404,
25
+ "precision_at_10": 0.07304,
26
+ "precision_at_100": 0.00951,
27
+ "precision_at_1000": 0.00099,
28
+ "precision_at_3": 0.14746,
29
+ "precision_at_5": 0.11337,
30
+ "recall_at_1": 0.22404,
31
+ "recall_at_10": 0.73044,
32
+ "recall_at_100": 0.95092,
33
+ "recall_at_1000": 0.99075,
34
+ "recall_at_3": 0.44239,
35
+ "recall_at_5": 0.56686
36
+ }
37
+ }
evaluation/mteb/ArxivClusteringP2P.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 4524.41,
4
+ "v_measure": 0.3970858340673288,
5
+ "v_measure_std": 0.13892512061332984
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/ArxivClusteringS2S.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 570.69,
4
+ "v_measure": 0.2824284771372105,
5
+ "v_measure_std": 0.14515433452032064
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/AskUbuntuDupQuestions.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 7.06,
4
+ "map": 0.5583700395192394,
5
+ "mrr": 0.7038913072154069
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/BIOSSES.json ADDED
@@ -0,0 +1,19 @@
1
+ {
2
+ "test": {
3
+ "cos_sim": {
4
+ "pearson": 0.7925366801756224,
5
+ "spearman": 0.7520954502580506
6
+ },
7
+ "euclidean": {
8
+ "pearson": 0.7879900722991617,
9
+ "spearman": 0.7779996549607588
10
+ },
11
+ "evaluation_time": 2.65,
12
+ "manhattan": {
13
+ "pearson": 0.7818408109480399,
14
+ "spearman": 0.7685958262303105
15
+ }
16
+ },
17
+ "dataset_version": null,
18
+ "mteb_version": "0.0.2"
19
+ }
evaluation/mteb/Banking77Classification.json ADDED
@@ -0,0 +1,12 @@
1
+ {
2
+ "test": {
3
+ "accuracy": 0.7770454545454545,
4
+ "accuracy_stderr": 0.007521053263962387,
5
+ "evaluation_time": 42.98,
6
+ "f1": 0.7769290001138031,
7
+ "f1_stderr": 0.007473720123531678,
8
+ "main_score": 0.7770454545454545
9
+ },
10
+ "dataset_version": null,
11
+ "mteb_version": "0.0.2"
12
+ }
evaluation/mteb/BiorxivClusteringP2P.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 553.9,
4
+ "v_measure": 0.33632603955439844,
5
+ "v_measure_std": 0.010408302575675535
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/BiorxivClusteringS2S.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 70.35,
4
+ "v_measure": 0.27038042665369927,
5
+ "v_measure_std": 0.007785436192603769
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/CQADupstackAndroidRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 64.44,
6
+ "map_at_1": 0.22139,
7
+ "map_at_10": 0.28839,
8
+ "map_at_100": 0.30023,
9
+ "map_at_1000": 0.30153,
10
+ "map_at_3": 0.26521,
11
+ "map_at_5": 0.27775,
12
+ "mrr_at_1": 0.26466,
13
+ "mrr_at_10": 0.33495,
14
+ "mrr_at_100": 0.34417,
15
+ "mrr_at_1000": 0.34485,
16
+ "mrr_at_3": 0.31402,
17
+ "mrr_at_5": 0.32496,
18
+ "ndcg_at_1": 0.26466,
19
+ "ndcg_at_10": 0.33372,
20
+ "ndcg_at_100": 0.387,
21
+ "ndcg_at_1000": 0.41696,
22
+ "ndcg_at_3": 0.29443,
23
+ "ndcg_at_5": 0.31121,
24
+ "precision_at_1": 0.26466,
25
+ "precision_at_10": 0.06037,
26
+ "precision_at_100": 0.01067,
27
+ "precision_at_1000": 0.00162,
28
+ "precision_at_3": 0.13782,
29
+ "precision_at_5": 0.09757,
30
+ "recall_at_1": 0.22139,
31
+ "recall_at_10": 0.4239,
32
+ "recall_at_100": 0.65427,
33
+ "recall_at_1000": 0.86049,
34
+ "recall_at_3": 0.31127,
35
+ "recall_at_5": 0.35718
36
+ }
37
+ }
evaluation/mteb/CQADupstackEnglishRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 95.23,
6
+ "map_at_1": 0.20652,
7
+ "map_at_10": 0.27558,
8
+ "map_at_100": 0.28473,
9
+ "map_at_1000": 0.28577,
10
+ "map_at_3": 0.25402,
11
+ "map_at_5": 0.2668,
12
+ "mrr_at_1": 0.25223,
13
+ "mrr_at_10": 0.31966,
14
+ "mrr_at_100": 0.32664,
15
+ "mrr_at_1000": 0.32724,
16
+ "mrr_at_3": 0.30074,
17
+ "mrr_at_5": 0.31249,
18
+ "ndcg_at_1": 0.25223,
19
+ "ndcg_at_10": 0.31694,
20
+ "ndcg_at_100": 0.35662,
21
+ "ndcg_at_1000": 0.38092,
22
+ "ndcg_at_3": 0.28294,
23
+ "ndcg_at_5": 0.30049,
24
+ "precision_at_1": 0.25223,
25
+ "precision_at_10": 0.05777,
26
+ "precision_at_100": 0.00973,
27
+ "precision_at_1000": 0.0014,
28
+ "precision_at_3": 0.13397,
29
+ "precision_at_5": 0.09605,
30
+ "recall_at_1": 0.20652,
31
+ "recall_at_10": 0.39368,
32
+ "recall_at_100": 0.56485,
33
+ "recall_at_1000": 0.73292,
34
+ "recall_at_3": 0.2983,
35
+ "recall_at_5": 0.3443
36
+ }
37
+ }
evaluation/mteb/CQADupstackGamingRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 104.86,
6
+ "map_at_1": 0.2518,
7
+ "map_at_10": 0.34579,
8
+ "map_at_100": 0.3559,
9
+ "map_at_1000": 0.3568,
10
+ "map_at_3": 0.31736,
11
+ "map_at_5": 0.33479,
12
+ "mrr_at_1": 0.29467,
13
+ "mrr_at_10": 0.37967,
14
+ "mrr_at_100": 0.388,
15
+ "mrr_at_1000": 0.38858,
16
+ "mrr_at_3": 0.35465,
17
+ "mrr_at_5": 0.37057,
18
+ "ndcg_at_1": 0.29467,
19
+ "ndcg_at_10": 0.39796,
20
+ "ndcg_at_100": 0.44531,
21
+ "ndcg_at_1000": 0.46666,
22
+ "ndcg_at_3": 0.34676,
23
+ "ndcg_at_5": 0.37468,
24
+ "precision_at_1": 0.29467,
25
+ "precision_at_10": 0.06602,
26
+ "precision_at_100": 0.0099,
27
+ "precision_at_1000": 0.00124,
28
+ "precision_at_3": 0.15569,
29
+ "precision_at_5": 0.11172,
30
+ "recall_at_1": 0.2518,
31
+ "recall_at_10": 0.52269,
32
+ "recall_at_100": 0.73574,
33
+ "recall_at_1000": 0.89141,
34
+ "recall_at_3": 0.38522,
35
+ "recall_at_5": 0.45323
36
+ }
37
+ }
evaluation/mteb/CQADupstackGisRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 141.33,
6
+ "map_at_1": 0.16303,
7
+ "map_at_10": 0.21629,
8
+ "map_at_100": 0.22388,
9
+ "map_at_1000": 0.22489,
10
+ "map_at_3": 0.19608,
11
+ "map_at_5": 0.20774,
12
+ "mrr_at_1": 0.1774,
13
+ "mrr_at_10": 0.23214,
14
+ "mrr_at_100": 0.2397,
15
+ "mrr_at_1000": 0.24054,
16
+ "mrr_at_3": 0.21243,
17
+ "mrr_at_5": 0.22322,
18
+ "ndcg_at_1": 0.1774,
19
+ "ndcg_at_10": 0.25113,
20
+ "ndcg_at_100": 0.29288,
21
+ "ndcg_at_1000": 0.32204,
22
+ "ndcg_at_3": 0.21111,
23
+ "ndcg_at_5": 0.23062,
24
+ "precision_at_1": 0.1774,
25
+ "precision_at_10": 0.03955,
26
+ "precision_at_100": 0.00644,
27
+ "precision_at_1000": 0.00093,
28
+ "precision_at_3": 0.08851,
29
+ "precision_at_5": 0.06418,
30
+ "recall_at_1": 0.16303,
31
+ "recall_at_10": 0.34487,
32
+ "recall_at_100": 0.54414,
33
+ "recall_at_1000": 0.77158,
34
+ "recall_at_3": 0.23733,
35
+ "recall_at_5": 0.28381
36
+ }
37
+ }
evaluation/mteb/CQADupstackMathematicaRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 78.71,
6
+ "map_at_1": 0.10133,
7
+ "map_at_10": 0.15666,
8
+ "map_at_100": 0.16592,
9
+ "map_at_1000": 0.16734,
10
+ "map_at_3": 0.13625,
11
+ "map_at_5": 0.14721,
12
+ "mrr_at_1": 0.12562,
13
+ "mrr_at_10": 0.18487,
14
+ "mrr_at_100": 0.19391,
15
+ "mrr_at_1000": 0.19487,
16
+ "mrr_at_3": 0.16418,
17
+ "mrr_at_5": 0.176,
18
+ "ndcg_at_1": 0.12562,
19
+ "ndcg_at_10": 0.1943,
20
+ "ndcg_at_100": 0.24546,
21
+ "ndcg_at_1000": 0.28193,
22
+ "ndcg_at_3": 0.1551,
23
+ "ndcg_at_5": 0.17322,
24
+ "precision_at_1": 0.12562,
25
+ "precision_at_10": 0.03794,
26
+ "precision_at_100": 0.0074,
27
+ "precision_at_1000": 0.00122,
28
+ "precision_at_3": 0.07546,
29
+ "precision_at_5": 0.05721,
30
+ "recall_at_1": 0.10133,
31
+ "recall_at_10": 0.28262,
32
+ "recall_at_100": 0.51743,
33
+ "recall_at_1000": 0.78075,
34
+ "recall_at_3": 0.17634,
35
+ "recall_at_5": 0.22129
36
+ }
37
+ }
evaluation/mteb/CQADupstackPhysicsRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 136.6,
6
+ "map_at_1": 0.19992,
7
+ "map_at_10": 0.27347,
8
+ "map_at_100": 0.28582,
9
+ "map_at_1000": 0.28716,
10
+ "map_at_3": 0.24907,
11
+ "map_at_5": 0.261,
12
+ "mrr_at_1": 0.23773,
13
+ "mrr_at_10": 0.31647,
14
+ "mrr_at_100": 0.32639,
15
+ "mrr_at_1000": 0.32706,
16
+ "mrr_at_3": 0.29195,
17
+ "mrr_at_5": 0.30484,
18
+ "ndcg_at_1": 0.23773,
19
+ "ndcg_at_10": 0.32322,
20
+ "ndcg_at_100": 0.37996,
21
+ "ndcg_at_1000": 0.40819,
22
+ "ndcg_at_3": 0.27876,
23
+ "ndcg_at_5": 0.29664,
24
+ "precision_at_1": 0.23773,
25
+ "precision_at_10": 0.05977,
26
+ "precision_at_100": 0.01055,
27
+ "precision_at_1000": 0.0015,
28
+ "precision_at_3": 0.13122,
29
+ "precision_at_5": 0.09451,
30
+ "recall_at_1": 0.19992,
31
+ "recall_at_10": 0.43106,
32
+ "recall_at_100": 0.67264,
33
+ "recall_at_1000": 0.86386,
34
+ "recall_at_3": 0.30392,
35
+ "recall_at_5": 0.34911
36
+ }
37
+ }
evaluation/mteb/CQADupstackProgrammersRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 130.5,
6
+ "map_at_1": 0.17896,
7
+ "map_at_10": 0.24644,
8
+ "map_at_100": 0.2579,
9
+ "map_at_1000": 0.25914,
10
+ "map_at_3": 0.22694,
11
+ "map_at_5": 0.2369,
12
+ "mrr_at_1": 0.21347,
13
+ "mrr_at_10": 0.28594,
14
+ "mrr_at_100": 0.29544,
15
+ "mrr_at_1000": 0.29621,
16
+ "mrr_at_3": 0.26807,
17
+ "mrr_at_5": 0.27669,
18
+ "ndcg_at_1": 0.21347,
19
+ "ndcg_at_10": 0.28833,
20
+ "ndcg_at_100": 0.34272,
21
+ "ndcg_at_1000": 0.37355,
22
+ "ndcg_at_3": 0.25373,
23
+ "ndcg_at_5": 0.26756,
24
+ "precision_at_1": 0.21347,
25
+ "precision_at_10": 0.05217,
26
+ "precision_at_100": 0.00954,
27
+ "precision_at_1000": 0.00139,
28
+ "precision_at_3": 0.11948,
29
+ "precision_at_5": 0.08425,
30
+ "recall_at_1": 0.17896,
31
+ "recall_at_10": 0.37291,
32
+ "recall_at_100": 0.61138,
33
+ "recall_at_1000": 0.83212,
34
+ "recall_at_3": 0.27706,
35
+ "recall_at_5": 0.31234
36
+ }
37
+ }
evaluation/mteb/CQADupstackRetrieval.json ADDED
@@ -0,0 +1 @@
1
+ {"dataset_version": null, "mteb_version": "0.0.2", "test": {"evaluation_time": 62.83, "map_at_1": 0.17195166666666667, "map_at_10": 0.23329083333333334, "map_at_100": 0.2430308333333333, "map_at_1000": 0.24422416666666666, "map_at_3": 0.21327416666666665, "map_at_5": 0.22419999999999998, "mrr_at_1": 0.19999916666666667, "mrr_at_10": 0.26390166666666665, "mrr_at_100": 0.27231, "mrr_at_1000": 0.27308333333333334, "mrr_at_3": 0.244675, "mrr_at_5": 0.25541083333333336, "ndcg_at_1": 0.19999916666666667, "ndcg_at_10": 0.27248666666666665, "ndcg_at_100": 0.3200258333333334, "ndcg_at_1000": 0.34946499999999997, "ndcg_at_3": 0.2358566666666667, "ndcg_at_5": 0.2526341666666666, "precision_at_1": 0.19999916666666667, "precision_at_10": 0.04772166666666666, "precision_at_100": 0.00847, "precision_at_1000": 0.0012741666666666667, "precision_at_3": 0.10756166666666668, "precision_at_5": 0.07725416666666667, "recall_at_1": 0.17195166666666667, "recall_at_10": 0.35990833333333344, "recall_at_100": 0.57468, "recall_at_1000": 0.7882366666666667, "recall_at_3": 0.25898499999999997, "recall_at_5": 0.30084333333333335}}
evaluation/mteb/CQADupstackStatsRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 177.3,
6
+ "map_at_1": 0.16779,
7
+ "map_at_10": 0.21557,
8
+ "map_at_100": 0.22338,
9
+ "map_at_1000": 0.22421,
10
+ "map_at_3": 0.19939,
11
+ "map_at_5": 0.20903,
12
+ "mrr_at_1": 0.18405,
13
+ "mrr_at_10": 0.23435,
14
+ "mrr_at_100": 0.24179,
15
+ "mrr_at_1000": 0.2425,
16
+ "mrr_at_3": 0.21907,
17
+ "mrr_at_5": 0.22781,
18
+ "ndcg_at_1": 0.18405,
19
+ "ndcg_at_10": 0.24515,
20
+ "ndcg_at_100": 0.28721,
21
+ "ndcg_at_1000": 0.3126,
22
+ "ndcg_at_3": 0.21508,
23
+ "ndcg_at_5": 0.2301,
24
+ "precision_at_1": 0.18405,
25
+ "precision_at_10": 0.03834,
26
+ "precision_at_100": 0.00641,
27
+ "precision_at_1000": 0.00093,
28
+ "precision_at_3": 0.09151,
29
+ "precision_at_5": 0.06503,
30
+ "recall_at_1": 0.16779,
31
+ "recall_at_10": 0.3173,
32
+ "recall_at_100": 0.51673,
33
+ "recall_at_1000": 0.71176,
34
+ "recall_at_3": 0.23518,
35
+ "recall_at_5": 0.27231
36
+ }
37
+ }
evaluation/mteb/CQADupstackTexRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 300.62,
6
+ "map_at_1": 0.09279,
7
+ "map_at_10": 0.13822,
8
+ "map_at_100": 0.14533,
9
+ "map_at_1000": 0.1465,
10
+ "map_at_3": 0.12396,
11
+ "map_at_5": 0.13214,
12
+ "mrr_at_1": 0.11149,
13
+ "mrr_at_10": 0.16139,
14
+ "mrr_at_100": 0.16872,
15
+ "mrr_at_1000": 0.16964,
16
+ "mrr_at_3": 0.14613,
17
+ "mrr_at_5": 0.15486,
18
+ "ndcg_at_1": 0.11149,
19
+ "ndcg_at_10": 0.1682,
20
+ "ndcg_at_100": 0.2073,
21
+ "ndcg_at_1000": 0.23894,
22
+ "ndcg_at_3": 0.1411,
23
+ "ndcg_at_5": 0.15404,
24
+ "precision_at_1": 0.11149,
25
+ "precision_at_10": 0.03063,
26
+ "precision_at_100": 0.00587,
27
+ "precision_at_1000": 0.001,
28
+ "precision_at_3": 0.06699,
29
+ "precision_at_5": 0.04928,
30
+ "recall_at_1": 0.09279,
31
+ "recall_at_10": 0.23745,
32
+ "recall_at_100": 0.41873,
33
+ "recall_at_1000": 0.64982,
34
+ "recall_at_3": 0.16152,
35
+ "recall_at_5": 0.19409
36
+ }
37
+ }
evaluation/mteb/CQADupstackUnixRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 185.1,
6
+ "map_at_1": 0.1636,
7
+ "map_at_10": 0.21927,
8
+ "map_at_100": 0.22889,
9
+ "map_at_1000": 0.22994,
10
+ "map_at_3": 0.20433,
11
+ "map_at_5": 0.21337,
12
+ "mrr_at_1": 0.1875,
13
+ "mrr_at_10": 0.24859,
14
+ "mrr_at_100": 0.25747,
15
+ "mrr_at_1000": 0.25829,
16
+ "mrr_at_3": 0.23383,
17
+ "mrr_at_5": 0.24297,
18
+ "ndcg_at_1": 0.1875,
19
+ "ndcg_at_10": 0.25372,
20
+ "ndcg_at_100": 0.30343,
21
+ "ndcg_at_1000": 0.33286,
22
+ "ndcg_at_3": 0.22627,
23
+ "ndcg_at_5": 0.2404,
24
+ "precision_at_1": 0.1875,
25
+ "precision_at_10": 0.04142,
26
+ "precision_at_100": 0.00738,
27
+ "precision_at_1000": 0.00111,
28
+ "precision_at_3": 0.10261,
29
+ "precision_at_5": 0.07164,
30
+ "recall_at_1": 0.1636,
31
+ "recall_at_10": 0.32949,
32
+ "recall_at_100": 0.55552,
33
+ "recall_at_1000": 0.77099,
34
+ "recall_at_3": 0.25538,
35
+ "recall_at_5": 0.29008
36
+ }
37
+ }
evaluation/mteb/CQADupstackWebmastersRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 62.83,
6
+ "map_at_1": 0.1739,
7
+ "map_at_10": 0.23058,
8
+ "map_at_100": 0.24445,
9
+ "map_at_1000": 0.24638,
10
+ "map_at_3": 0.21037,
11
+ "map_at_5": 0.21966,
12
+ "mrr_at_1": 0.1996,
13
+ "mrr_at_10": 0.26301,
14
+ "mrr_at_100": 0.27297,
15
+ "mrr_at_1000": 0.27375,
16
+ "mrr_at_3": 0.24341,
17
+ "mrr_at_5": 0.25339,
18
+ "ndcg_at_1": 0.1996,
19
+ "ndcg_at_10": 0.27249,
20
+ "ndcg_at_100": 0.32997,
21
+ "ndcg_at_1000": 0.36359,
22
+ "ndcg_at_3": 0.23519,
23
+ "ndcg_at_5": 0.24915,
24
+ "precision_at_1": 0.1996,
25
+ "precision_at_10": 0.05356,
26
+ "precision_at_100": 0.01198,
27
+ "precision_at_1000": 0.00204,
28
+ "precision_at_3": 0.10738,
29
+ "precision_at_5": 0.07905,
30
+ "recall_at_1": 0.1739,
31
+ "recall_at_10": 0.35255,
32
+ "recall_at_100": 0.61351,
33
+ "recall_at_1000": 0.84395,
34
+ "recall_at_3": 0.25194,
35
+ "recall_at_5": 0.28546
36
+ }
37
+ }
evaluation/mteb/CQADupstackWordpressRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 194.26,
6
+ "map_at_1": 0.14239,
7
+ "map_at_10": 0.19323,
8
+ "map_at_100": 0.19994,
9
+ "map_at_1000": 0.20103,
10
+ "map_at_3": 0.17631,
11
+ "map_at_5": 0.18401,
12
+ "mrr_at_1": 0.15157,
13
+ "mrr_at_10": 0.20578,
14
+ "mrr_at_100": 0.21252,
15
+ "mrr_at_1000": 0.21347,
16
+ "mrr_at_3": 0.18762,
17
+ "mrr_at_5": 0.19713,
18
+ "ndcg_at_1": 0.15157,
19
+ "ndcg_at_10": 0.22468,
20
+ "ndcg_at_100": 0.26245,
21
+ "ndcg_at_1000": 0.29534,
22
+ "ndcg_at_3": 0.18981,
23
+ "ndcg_at_5": 0.2035,
24
+ "precision_at_1": 0.15157,
25
+ "precision_at_10": 0.03512,
26
+ "precision_at_100": 0.00577,
27
+ "precision_at_1000": 0.00091,
28
+ "precision_at_3": 0.0801,
29
+ "precision_at_5": 0.05656,
30
+ "recall_at_1": 0.14239,
31
+ "recall_at_10": 0.31038,
32
+ "recall_at_100": 0.49122,
33
+ "recall_at_1000": 0.74919,
34
+ "recall_at_3": 0.21436,
35
+ "recall_at_5": 0.24692
36
+ }
37
+ }
evaluation/mteb/ClimateFEVER.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 3210.96,
6
+ "map_at_1": 0.08828,
7
+ "map_at_10": 0.14982,
8
+ "map_at_100": 0.16495,
9
+ "map_at_1000": 0.16658,
10
+ "map_at_3": 0.12366,
11
+ "map_at_5": 0.13655,
12
+ "mrr_at_1": 0.19088,
13
+ "mrr_at_10": 0.2929,
14
+ "mrr_at_100": 0.30291,
15
+ "mrr_at_1000": 0.30342,
16
+ "mrr_at_3": 0.25907,
17
+ "mrr_at_5": 0.27841,
18
+ "ndcg_at_1": 0.19088,
19
+ "ndcg_at_10": 0.21858,
20
+ "ndcg_at_100": 0.28324,
21
+ "ndcg_at_1000": 0.31561,
22
+ "ndcg_at_3": 0.17175,
23
+ "ndcg_at_5": 0.18869,
24
+ "precision_at_1": 0.19088,
25
+ "precision_at_10": 0.06919,
26
+ "precision_at_100": 0.01376,
27
+ "precision_at_1000": 0.00197,
28
+ "precision_at_3": 0.12704,
29
+ "precision_at_5": 0.09993,
30
+ "recall_at_1": 0.08828,
31
+ "recall_at_10": 0.27381,
32
+ "recall_at_100": 0.5,
33
+ "recall_at_1000": 0.68355,
34
+ "recall_at_3": 0.16118,
35
+ "recall_at_5": 0.20587
36
+ }
37
+ }
evaluation/mteb/DBPedia.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 2312.27,
6
+ "map_at_1": 0.05586,
7
+ "map_at_10": 0.1004,
8
+ "map_at_100": 0.1255,
9
+ "map_at_1000": 0.13124,
10
+ "map_at_3": 0.0775,
11
+ "map_at_5": 0.08836,
12
+ "mrr_at_1": 0.4225,
13
+ "mrr_at_10": 0.51206,
14
+ "mrr_at_100": 0.51818,
15
+ "mrr_at_1000": 0.51855,
16
+ "mrr_at_3": 0.48875,
17
+ "mrr_at_5": 0.50488,
18
+ "ndcg_at_1": 0.3225,
19
+ "ndcg_at_10": 0.22718,
20
+ "ndcg_at_100": 0.24359,
21
+ "ndcg_at_1000": 0.29232,
22
+ "ndcg_at_3": 0.25974,
23
+ "ndcg_at_5": 0.24292,
24
+ "precision_at_1": 0.4225,
25
+ "precision_at_10": 0.1775,
26
+ "precision_at_100": 0.05032,
27
+ "precision_at_1000": 0.01117,
28
+ "precision_at_3": 0.28833,
29
+ "precision_at_5": 0.2425,
30
+ "recall_at_1": 0.05586,
31
+ "recall_at_10": 0.1416,
32
+ "recall_at_100": 0.28051,
33
+ "recall_at_1000": 0.45157,
34
+ "recall_at_3": 0.08758,
35
+ "recall_at_5": 0.10976
36
+ }
37
+ }
evaluation/mteb/EmotionClassification.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "test": {
3
+ "accuracy": 0.39075000000000004,
4
+ "accuracy_stderr": 0.02011000994529839,
5
+ "evaluation_time": 21.65,
6
+ "f1": 0.3501420354708222,
7
+ "f1_stderr": 0.015595291440010818,
8
+ "main_score": 0.39075000000000004
9
+ },
10
+ "validation": {
11
+ "accuracy": 0.40695,
12
+ "accuracy_stderr": 0.03307298746711582,
13
+ "evaluation_time": 24.95,
14
+ "f1": 0.3668185447111504,
15
+ "f1_stderr": 0.02395586698305724,
16
+ "main_score": 0.40695
17
+ },
18
+ "dataset_version": null,
19
+ "mteb_version": "0.0.2"
20
+ }
evaluation/mteb/FEVER.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 4114.33,
6
+ "map_at_1": 0.4352,
7
+ "map_at_10": 0.54368,
8
+ "map_at_100": 0.54918,
9
+ "map_at_1000": 0.54942,
10
+ "map_at_3": 0.51712,
11
+ "map_at_5": 0.53336,
12
+ "mrr_at_1": 0.46955,
13
+ "mrr_at_10": 0.58219,
14
+ "mrr_at_100": 0.58735,
15
+ "mrr_at_1000": 0.58753,
16
+ "mrr_at_3": 0.55518,
17
+ "mrr_at_5": 0.57191,
18
+ "ndcg_at_1": 0.46955,
19
+ "ndcg_at_10": 0.6045,
20
+ "ndcg_at_100": 0.63047,
21
+ "ndcg_at_1000": 0.63713,
22
+ "ndcg_at_3": 0.55233,
23
+ "ndcg_at_5": 0.58072,
24
+ "precision_at_1": 0.46955,
25
+ "precision_at_10": 0.08267,
26
+ "precision_at_100": 0.00962,
27
+ "precision_at_1000": 0.00103,
28
+ "precision_at_3": 0.22327,
29
+ "precision_at_5": 0.14941,
30
+ "recall_at_1": 0.4352,
31
+ "recall_at_10": 0.75632,
32
+ "recall_at_100": 0.87416,
33
+ "recall_at_1000": 0.92557,
34
+ "recall_at_3": 0.61597,
35
+ "recall_at_5": 0.68518
36
+ }
37
+ }
evaluation/mteb/FiQA2018.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 127.8,
6
+ "map_at_1": 0.09549,
7
+ "map_at_10": 0.15762,
8
+ "map_at_100": 0.17142,
9
+ "map_at_1000": 0.17329,
10
+ "map_at_3": 0.13575,
11
+ "map_at_5": 0.14754,
12
+ "mrr_at_1": 0.19753,
13
+ "mrr_at_10": 0.26568,
14
+ "mrr_at_100": 0.27606,
15
+ "mrr_at_1000": 0.2768,
16
+ "mrr_at_3": 0.24203,
17
+ "mrr_at_5": 0.25669,
18
+ "ndcg_at_1": 0.19753,
19
+ "ndcg_at_10": 0.21118,
20
+ "ndcg_at_100": 0.27308,
21
+ "ndcg_at_1000": 0.31304,
22
+ "ndcg_at_3": 0.18319,
23
+ "ndcg_at_5": 0.19414,
24
+ "precision_at_1": 0.19753,
25
+ "precision_at_10": 0.0608,
26
+ "precision_at_100": 0.01204,
27
+ "precision_at_1000": 0.00192,
28
+ "precision_at_3": 0.12191,
29
+ "precision_at_5": 0.09383,
30
+ "recall_at_1": 0.09549,
31
+ "recall_at_10": 0.26131,
32
+ "recall_at_100": 0.50545,
33
+ "recall_at_1000": 0.74968,
34
+ "recall_at_3": 0.16951,
35
+ "recall_at_5": 0.2095
36
+ }
37
+ }
evaluation/mteb/HotpotQA.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 3659.36,
6
+ "map_at_1": 0.25544,
7
+ "map_at_10": 0.3262,
8
+ "map_at_100": 0.33275,
9
+ "map_at_1000": 0.33344,
10
+ "map_at_3": 0.30851,
11
+ "map_at_5": 0.31869,
12
+ "mrr_at_1": 0.51087,
13
+ "mrr_at_10": 0.57704,
14
+ "mrr_at_100": 0.58175,
15
+ "mrr_at_1000": 0.58207,
16
+ "mrr_at_3": 0.56106,
17
+ "mrr_at_5": 0.57074,
18
+ "ndcg_at_1": 0.51087,
19
+ "ndcg_at_10": 0.40876,
20
+ "ndcg_at_100": 0.43762,
21
+ "ndcg_at_1000": 0.45423,
22
+ "ndcg_at_3": 0.3765,
23
+ "ndcg_at_5": 0.39305,
24
+ "precision_at_1": 0.51087,
25
+ "precision_at_10": 0.08304,
26
+ "precision_at_100": 0.01059,
27
+ "precision_at_1000": 0.00128,
28
+ "precision_at_3": 0.22876,
29
+ "precision_at_5": 0.15033,
30
+ "recall_at_1": 0.25544,
31
+ "recall_at_10": 0.41519,
32
+ "recall_at_100": 0.52957,
33
+ "recall_at_1000": 0.64132,
34
+ "recall_at_3": 0.34315,
35
+ "recall_at_5": 0.37583
36
+ }
37
+ }
evaluation/mteb/ImdbClassification.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "test": {
3
+ "accuracy": 0.586696,
4
+ "accuracy_stderr": 0.04366294612139682,
5
+ "ap": 0.553644880984279,
6
+ "ap_stderr": 0.02927763434288163,
7
+ "evaluation_time": 1493.91,
8
+ "f1": 0.5807942097405652,
9
+ "f1_stderr": 0.046729535979483976,
10
+ "main_score": 0.586696
11
+ },
12
+ "dataset_version": null,
13
+ "mteb_version": "0.0.2"
14
+ }
evaluation/mteb/MSMARCO.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "validation": {
5
+ "evaluation_time": 6441.95,
6
+ "map_at_1": 0.14442,
7
+ "map_at_10": 0.22932,
8
+ "map_at_100": 0.24132,
9
+ "map_at_1000": 0.24213,
10
+ "map_at_3": 0.20002,
11
+ "map_at_5": 0.21636,
12
+ "mrr_at_1": 0.14842,
13
+ "mrr_at_10": 0.23416,
14
+ "mrr_at_100": 0.24594,
15
+ "mrr_at_1000": 0.24669,
16
+ "mrr_at_3": 0.20494,
17
+ "mrr_at_5": 0.2214,
18
+ "ndcg_at_1": 0.14842,
19
+ "ndcg_at_10": 0.27975,
20
+ "ndcg_at_100": 0.34143,
21
+ "ndcg_at_1000": 0.3637,
22
+ "ndcg_at_3": 0.21944,
23
+ "ndcg_at_5": 0.24881,
24
+ "precision_at_1": 0.14842,
25
+ "precision_at_10": 0.04537,
26
+ "precision_at_100": 0.00767,
27
+ "precision_at_1000": 0.00096,
28
+ "precision_at_3": 0.09322,
29
+ "precision_at_5": 0.07074,
30
+ "recall_at_1": 0.14442,
31
+ "recall_at_10": 0.43557,
32
+ "recall_at_100": 0.72904,
33
+ "recall_at_1000": 0.90407,
34
+ "recall_at_3": 0.27088,
35
+ "recall_at_5": 0.34144
36
+ }
37
+ }
evaluation/mteb/MTOPDomainClassification.json ADDED
@@ -0,0 +1,94 @@
1
+ {
2
+ "test": {
3
+ "de": {
4
+ "accuracy": 0.6273034657650043,
5
+ "accuracy_stderr": 0.026750019843962445,
6
+ "f1": 0.6078623915840713,
7
+ "f1_stderr": 0.024545709453124315,
8
+ "main_score": 0.6273034657650043
9
+ },
10
+ "en": {
11
+ "accuracy": 0.8695622435020519,
12
+ "accuracy_stderr": 0.005091511188494251,
13
+ "f1": 0.8658363130708494,
14
+ "f1_stderr": 0.0051871184408422396,
15
+ "main_score": 0.8695622435020519
16
+ },
17
+ "es": {
18
+ "accuracy": 0.6754503002001334,
19
+ "accuracy_stderr": 0.018521804912221235,
20
+ "f1": 0.6534879794116112,
21
+ "f1_stderr": 0.017925402376902783,
22
+ "main_score": 0.6754503002001334
23
+ },
24
+ "evaluation_time": 184.8,
25
+ "fr": {
26
+ "accuracy": 0.653523332289383,
27
+ "accuracy_stderr": 0.021385772427901146,
28
+ "f1": 0.6299400188244665,
29
+ "f1_stderr": 0.019299664587033034,
30
+ "main_score": 0.653523332289383
31
+ },
32
+ "hi": {
33
+ "accuracy": 0.45371100752958055,
34
+ "accuracy_stderr": 0.00950289876773436,
35
+ "f1": 0.4426285860740745,
36
+ "f1_stderr": 0.008239449950704894,
37
+ "main_score": 0.45371100752958055
38
+ },
39
+ "th": {
40
+ "accuracy": 0.5527667269439421,
41
+ "accuracy_stderr": 0.026744140679562094,
42
+ "f1": 0.5328388179869588,
43
+ "f1_stderr": 0.024911722678940297,
44
+ "main_score": 0.5527667269439421
45
+ }
46
+ },
47
+ "validation": {
48
+ "de": {
49
+ "accuracy": 0.623030303030303,
50
+ "accuracy_stderr": 0.018007006691627983,
51
+ "f1": 0.6090030451296102,
52
+ "f1_stderr": 0.016252335030927607,
53
+ "main_score": 0.623030303030303
54
+ },
55
+ "en": {
56
+ "accuracy": 0.8647874720357942,
57
+ "accuracy_stderr": 0.008134813907732269,
58
+ "f1": 0.8643221005490525,
59
+ "f1_stderr": 0.007924300687945415,
60
+ "main_score": 0.8647874720357942
61
+ },
62
+ "es": {
63
+ "accuracy": 0.6768172888015718,
64
+ "accuracy_stderr": 0.017930410630693398,
65
+ "f1": 0.6614808491907962,
66
+ "f1_stderr": 0.01724199330369825,
67
+ "main_score": 0.6768172888015718
68
+ },
69
+ "evaluation_time": 129.45,
70
+ "fr": {
71
+ "accuracy": 0.6497780596068484,
72
+ "accuracy_stderr": 0.025761089702497523,
73
+ "f1": 0.6384542819967916,
74
+ "f1_stderr": 0.02439328627444013,
75
+ "main_score": 0.6497780596068484
76
+ },
77
+ "hi": {
78
+ "accuracy": 0.44483101391650104,
79
+ "accuracy_stderr": 0.010657528607643997,
80
+ "f1": 0.441310151844022,
81
+ "f1_stderr": 0.007308843847135203,
82
+ "main_score": 0.44483101391650104
83
+ },
84
+ "th": {
85
+ "accuracy": 0.5390783961699582,
86
+ "accuracy_stderr": 0.021304608019150205,
87
+ "f1": 0.5230342245719538,
88
+ "f1_stderr": 0.0192316272968326,
89
+ "main_score": 0.5390783961699582
90
+ }
91
+ },
92
+ "dataset_version": null,
93
+ "mteb_version": "0.0.2"
94
+ }
evaluation/mteb/MTOPIntentClassification.json ADDED
@@ -0,0 +1,94 @@
1
+ {
2
+ "test": {
3
+ "de": {
4
+ "accuracy": 0.4956043956043956,
5
+ "accuracy_stderr": 0.013804089435250997,
6
+ "f1": 0.32863336734985976,
7
+ "f1_stderr": 0.010106922158537998,
8
+ "main_score": 0.4956043956043956
9
+ },
10
+ "en": {
11
+ "accuracy": 0.6225262197902417,
12
+ "accuracy_stderr": 0.011612339032627523,
13
+ "f1": 0.43440840371488526,
14
+ "f1_stderr": 0.007521212698716522,
15
+ "main_score": 0.6225262197902417
16
+ },
17
+ "es": {
18
+ "accuracy": 0.4993995997331555,
19
+ "accuracy_stderr": 0.017333218216550737,
20
+ "f1": 0.34726671876888127,
21
+ "f1_stderr": 0.009821507962106622,
22
+ "main_score": 0.4993995997331555
23
+ },
24
+ "evaluation_time": 336.12,
25
+ "fr": {
26
+ "accuracy": 0.46329470717193855,
27
+ "accuracy_stderr": 0.021175134055927164,
28
+ "f1": 0.3232527361598279,
29
+ "f1_stderr": 0.015670594458593846,
30
+ "main_score": 0.46329470717193855
31
+ },
32
+ "hi": {
33
+ "accuracy": 0.3220867694514163,
34
+ "accuracy_stderr": 0.007943432090835067,
35
+ "f1": 0.21321851228151392,
36
+ "f1_stderr": 0.006969308099265944,
37
+ "main_score": 0.3220867694514163
38
+ },
39
+ "th": {
40
+ "accuracy": 0.43627486437613017,
41
+ "accuracy_stderr": 0.013057059666123479,
42
+ "f1": 0.27048729223475076,
43
+ "f1_stderr": 0.007192319024223997,
44
+ "main_score": 0.43627486437613017
45
+ }
46
+ },
47
+ "validation": {
48
+ "de": {
49
+ "accuracy": 0.4886501377410468,
50
+ "accuracy_stderr": 0.015115810673577453,
51
+ "f1": 0.29305729988528817,
52
+ "f1_stderr": 0.013274418090447234,
53
+ "main_score": 0.4886501377410468
54
+ },
55
+ "en": {
56
+ "accuracy": 0.6184787472035794,
57
+ "accuracy_stderr": 0.01880584337530569,
58
+ "f1": 0.43633936644556937,
59
+ "f1_stderr": 0.012308022004906816,
60
+ "main_score": 0.6184787472035794
61
+ },
62
+ "es": {
63
+ "accuracy": 0.4988212180746562,
64
+ "accuracy_stderr": 0.018919199418584817,
65
+ "f1": 0.31473396095509043,
66
+ "f1_stderr": 0.012676416625486455,
67
+ "main_score": 0.4988212180746562
68
+ },
69
+ "evaluation_time": 282.16,
70
+ "fr": {
71
+ "accuracy": 0.46119213696892836,
72
+ "accuracy_stderr": 0.021845344865718933,
73
+ "f1": 0.28044706940385955,
74
+ "f1_stderr": 0.010116190733840541,
75
+ "main_score": 0.46119213696892836
76
+ },
77
+ "hi": {
78
+ "accuracy": 0.3071570576540755,
79
+ "accuracy_stderr": 0.008319669784761797,
80
+ "f1": 0.19812640558669523,
81
+ "f1_stderr": 0.0072837780604048784,
82
+ "main_score": 0.3071570576540755
83
+ },
84
+ "th": {
85
+ "accuracy": 0.42160383004189106,
86
+ "accuracy_stderr": 0.010170908236196702,
87
+ "f1": 0.26845354880486205,
88
+ "f1_stderr": 0.010016659553097792,
89
+ "main_score": 0.42160383004189106
90
+ }
91
+ },
92
+ "dataset_version": null,
93
+ "mteb_version": "0.0.2"
94
+ }
evaluation/mteb/MassiveIntentClassification.json ADDED
@@ -0,0 +1,724 @@
1
+ {
2
+ "test": {
3
+ "af": {
4
+ "accuracy": 0.4054808338937458,
5
+ "accuracy_stderr": 0.014447259403752678,
6
+ "f1": 0.39490307545239717,
7
+ "f1_stderr": 0.009212717959217511,
8
+ "main_score": 0.4054808338937458
9
+ },
10
+ "am": {
11
+ "accuracy": 0.2418291862811029,
12
+ "accuracy_stderr": 0.012904896103629838,
13
+ "f1": 0.23437620034727474,
14
+ "f1_stderr": 0.009361920647479921,
15
+ "main_score": 0.2418291862811029
16
+ },
17
+ "ar": {
18
+ "accuracy": 0.30134498991257563,
19
+ "accuracy_stderr": 0.014810775844007782,
20
+ "f1": 0.28787175191531283,
21
+ "f1_stderr": 0.012174439394246464,
22
+ "main_score": 0.30134498991257563
23
+ },
24
+ "az": {
25
+ "accuracy": 0.35884330867518494,
26
+ "accuracy_stderr": 0.012717802447884442,
27
+ "f1": 0.36264500398782124,
28
+ "f1_stderr": 0.010633859333577812,
29
+ "main_score": 0.35884330867518494
30
+ },
31
+ "bn": {
32
+ "accuracy": 0.2917283120376597,
33
+ "accuracy_stderr": 0.011891186572931844,
34
+ "f1": 0.278101616531901,
35
+ "f1_stderr": 0.010627299989024891,
36
+ "main_score": 0.2917283120376597
37
+ },
38
+ "cy": {
39
+ "accuracy": 0.41788836583725625,
40
+ "accuracy_stderr": 0.015348362273085753,
41
+ "f1": 0.39714131810548015,
42
+ "f1_stderr": 0.009372174520376655,
43
+ "main_score": 0.41788836583725625
44
+ },
45
+ "da": {
46
+ "accuracy": 0.44176193678547404,
47
+ "accuracy_stderr": 0.00924625540944034,
48
+ "f1": 0.4219249982655229,
49
+ "f1_stderr": 0.008618307806061426,
50
+ "main_score": 0.44176193678547404
51
+ },
52
+ "de": {
53
+ "accuracy": 0.4207464694014795,
54
+ "accuracy_stderr": 0.006478416557311502,
55
+ "f1": 0.39441882591831623,
56
+ "f1_stderr": 0.008645545638557534,
57
+ "main_score": 0.4207464694014795
58
+ },
59
+ "el": {
60
+ "accuracy": 0.362542030934768,
61
+ "accuracy_stderr": 0.012041979440806665,
62
+ "f1": 0.3446592715936761,
63
+ "f1_stderr": 0.009835042341951889,
64
+ "main_score": 0.362542030934768
65
+ },
66
+ "en": {
67
+ "accuracy": 0.6140887693342301,
68
+ "accuracy_stderr": 0.015504655249298095,
69
+ "f1": 0.5979854802683996,
70
+ "f1_stderr": 0.01208669884455989,
71
+ "main_score": 0.6140887693342301
72
+ },
73
+ "es": {
74
+ "accuracy": 0.42679892400806996,
75
+ "accuracy_stderr": 0.015347073096769526,
76
+ "f1": 0.4204801248338172,
77
+ "f1_stderr": 0.010741334091620194,
78
+ "main_score": 0.42679892400806996
79
+ },
80
+ "evaluation_time": 2374.32,
81
+ "fa": {
82
+ "accuracy": 0.3559179556153329,
83
+ "accuracy_stderr": 0.009888477522743777,
84
+ "f1": 0.34045862930486165,
85
+ "f1_stderr": 0.0084840956345157,
86
+ "main_score": 0.3559179556153329
87
+ },
88
+ "fi": {
89
+ "accuracy": 0.40036987222595827,
90
+ "accuracy_stderr": 0.013803399246107682,
91
+ "f1": 0.3811770343936278,
92
+ "f1_stderr": 0.01028587503250668,
93
+ "main_score": 0.40036987222595827
94
+ },
95
+ "fr": {
96
+ "accuracy": 0.4343981170141224,
97
+ "accuracy_stderr": 0.012246472905114743,
98
+ "f1": 0.42708438898786494,
99
+ "f1_stderr": 0.009512925256512221,
100
+ "main_score": 0.4343981170141224
101
+ },
102
+ "he": {
103
+ "accuracy": 0.3159381304640215,
104
+ "accuracy_stderr": 0.014973549858983267,
105
+ "f1": 0.2998550522450782,
106
+ "f1_stderr": 0.011727646762679908,
107
+ "main_score": 0.3159381304640215
108
+ },
109
+ "hi": {
110
+ "accuracy": 0.27044384667114996,
111
+ "accuracy_stderr": 0.007563774725762617,
112
+ "f1": 0.27313059184832666,
113
+ "f1_stderr": 0.008056028697421164,
114
+ "main_score": 0.27044384667114996
115
+ },
116
+ "hu": {
117
+ "accuracy": 0.38453261600538,
118
+ "accuracy_stderr": 0.01598445677659242,
119
+ "f1": 0.37309189326110437,
120
+ "f1_stderr": 0.010195896901809987,
121
+ "main_score": 0.38453261600538
122
+ },
123
+ "hy": {
124
+ "accuracy": 0.2797915265635508,
125
+ "accuracy_stderr": 0.015095564553015866,
126
+ "f1": 0.27430939684346445,
127
+ "f1_stderr": 0.011012889120934774,
128
+ "main_score": 0.2797915265635508
129
+ },
130
+ "id": {
131
+ "accuracy": 0.4397108271687963,
132
+ "accuracy_stderr": 0.017542356737953325,
133
+ "f1": 0.43405857056887615,
134
+ "f1_stderr": 0.010528016012937845,
135
+ "main_score": 0.4397108271687963
136
+ },
137
+ "is": {
138
+ "accuracy": 0.40302622730329524,
139
+ "accuracy_stderr": 0.012582306948623233,
140
+ "f1": 0.39108052180520747,
141
+ "f1_stderr": 0.009992598454055583,
142
+ "main_score": 0.40302622730329524
143
+ },
144
+ "it": {
145
+ "accuracy": 0.45474108944182917,
146
+ "accuracy_stderr": 0.01340459754818702,
147
+ "f1": 0.4585950328241134,
148
+ "f1_stderr": 0.010236110188058374,
149
+ "main_score": 0.45474108944182917
150
+ },
151
+ "ja": {
152
+ "accuracy": 0.4560860793544048,
153
+ "accuracy_stderr": 0.010590674677296958,
154
+ "f1": 0.4394920708216737,
155
+ "f1_stderr": 0.010641769554373246,
156
+ "main_score": 0.4560860793544048
157
+ },
158
+ "jv": {
159
+ "accuracy": 0.386684599865501,
160
+ "accuracy_stderr": 0.012145936599746584,
161
+ "f1": 0.37699003401885905,
162
+ "f1_stderr": 0.00943165160039381,
163
+ "main_score": 0.386684599865501
164
+ },
165
+ "ka": {
166
+ "accuracy": 0.25652320107599197,
167
+ "accuracy_stderr": 0.008190163090884097,
168
+ "f1": 0.25279084273189584,
169
+ "f1_stderr": 0.00943958002987095,
170
+ "main_score": 0.25652320107599197
171
+ },
172
+ "km": {
173
+ "accuracy": 0.28295225285810355,
174
+ "accuracy_stderr": 0.011478062668929577,
175
+ "f1": 0.2664582563877155,
176
+ "f1_stderr": 0.007309349177116762,
177
+ "main_score": 0.28295225285810355
178
+ },
179
+ "kn": {
180
+ "accuracy": 0.23480161398789506,
181
+ "accuracy_stderr": 0.013124898182906054,
182
+ "f1": 0.22275241866506734,
183
+ "f1_stderr": 0.009107560102876623,
184
+ "main_score": 0.23480161398789506
185
+ },
186
+ "ko": {
187
+ "accuracy": 0.3655682582380632,
188
+ "accuracy_stderr": 0.0138514372378647,
189
+ "f1": 0.3600475317106361,
190
+ "f1_stderr": 0.012525556508818685,
191
+ "main_score": 0.3655682582380632
192
+ },
193
+ "lv": {
194
+ "accuracy": 0.4184936112979153,
195
+ "accuracy_stderr": 0.01503834658744963,
196
+ "f1": 0.4138932672359119,
197
+ "f1_stderr": 0.009112866608871466,
198
+ "main_score": 0.4184936112979153
199
+ },
200
+ "ml": {
201
+ "accuracy": 0.2490921318090114,
202
+ "accuracy_stderr": 0.011318911223656808,
203
+ "f1": 0.23968687483768808,
204
+ "f1_stderr": 0.009623239538185528,
205
+ "main_score": 0.2490921318090114
206
+ },
207
+ "mn": {
208
+ "accuracy": 0.2986213853396099,
209
+ "accuracy_stderr": 0.013066535467207235,
210
+ "f1": 0.2997715207525541,
211
+ "f1_stderr": 0.012860089685643984,
212
+ "main_score": 0.2986213853396099
213
+ },
214
+ "ms": {
215
+ "accuracy": 0.4242098184263618,
216
+ "accuracy_stderr": 0.013890887562095782,
217
+ "f1": 0.4150877432664628,
218
+ "f1_stderr": 0.012091881563068337,
219
+ "main_score": 0.4242098184263618
220
+ },
221
+ "my": {
222
+ "accuracy": 0.25131136516476127,
223
+ "accuracy_stderr": 0.0075325439284173994,
224
+ "f1": 0.23938932214086775,
225
+ "f1_stderr": 0.006306918181473474,
226
+ "main_score": 0.25131136516476127
227
+ },
228
+ "nb": {
229
+ "accuracy": 0.3981506388702084,
230
+ "accuracy_stderr": 0.013968199431054802,
231
+ "f1": 0.3880958658779166,
232
+ "f1_stderr": 0.011576475125850125,
233
+ "main_score": 0.3981506388702084
234
+ },
235
+ "nl": {
236
+ "accuracy": 0.4362138533960995,
237
+ "accuracy_stderr": 0.01079982721922183,
238
+ "f1": 0.4201386842914633,
239
+ "f1_stderr": 0.009992268819898372,
240
+ "main_score": 0.4362138533960995
241
+ },
242
+ "pl": {
243
+ "accuracy": 0.4219569603227976,
244
+ "accuracy_stderr": 0.01868909945354249,
245
+ "f1": 0.4000556559825827,
246
+ "f1_stderr": 0.012543479799886282,
247
+ "main_score": 0.4219569603227976
248
+ },
249
+ "pt": {
250
+ "accuracy": 0.4520847343644923,
251
+ "accuracy_stderr": 0.01543976511380644,
252
+ "f1": 0.44241150050290506,
253
+ "f1_stderr": 0.008326917982409131,
254
+ "main_score": 0.4520847343644923
255
+ },
256
+ "ro": {
257
+ "accuracy": 0.4180901143241426,
258
+ "accuracy_stderr": 0.016105657510711916,
259
+ "f1": 0.40474074848670083,
260
+ "f1_stderr": 0.015837216995188204,
261
+ "main_score": 0.4180901143241426
262
+ },
263
+ "ru": {
264
+ "accuracy": 0.3596839273705447,
265
+ "accuracy_stderr": 0.019516291427541597,
266
+ "f1": 0.35095456843621,
267
+ "f1_stderr": 0.012440228318941022,
268
+ "main_score": 0.3596839273705447
269
+ },
270
+ "sl": {
271
+ "accuracy": 0.40605245460659045,
272
+ "accuracy_stderr": 0.01602119555635458,
273
+ "f1": 0.39302383051500134,
274
+ "f1_stderr": 0.0108929281580567,
275
+ "main_score": 0.40605245460659045
276
+ },
277
+ "sq": {
278
+ "accuracy": 0.42757229320780094,
279
+ "accuracy_stderr": 0.013061126335718017,
280
+ "f1": 0.41537639314973884,
281
+ "f1_stderr": 0.011214750874227073,
282
+ "main_score": 0.42757229320780094
283
+ },
284
+ "sv": {
285
+ "accuracy": 0.42347007397444514,
286
+ "accuracy_stderr": 0.01443766953082292,
287
+ "f1": 0.41043660179486263,
288
+ "f1_stderr": 0.009119701786380115,
289
+ "main_score": 0.42347007397444514
290
+ },
291
+ "sw": {
292
+ "accuracy": 0.4112306657700067,
293
+ "accuracy_stderr": 0.016030226034380948,
294
+ "f1": 0.39712940473289027,
295
+ "f1_stderr": 0.01079211644663987,
296
+ "main_score": 0.4112306657700067
297
+ },
298
+ "ta": {
299
+ "accuracy": 0.24603227975790182,
300
+ "accuracy_stderr": 0.007348449965253495,
301
+ "f1": 0.23969236788828607,
302
+ "f1_stderr": 0.00842584449511441,
303
+ "main_score": 0.24603227975790182
304
+ },
305
+ "te": {
306
+ "accuracy": 0.2503698722259583,
307
+ "accuracy_stderr": 0.010974450116174157,
308
+ "f1": 0.2437196123281459,
309
+ "f1_stderr": 0.007768870065899431,
310
+ "main_score": 0.2503698722259583
311
+ },
312
+ "th": {
313
+ "accuracy": 0.35400134498991254,
314
+ "accuracy_stderr": 0.012277223814879825,
315
+ "f1": 0.35063600413688034,
316
+ "f1_stderr": 0.008031998429326455,
317
+ "main_score": 0.35400134498991254
318
+ },
319
+ "tl": {
320
+ "accuracy": 0.4119031607262945,
321
+ "accuracy_stderr": 0.014317194926727485,
322
+ "f1": 0.4024043230427301,
323
+ "f1_stderr": 0.009308382803276337,
324
+ "main_score": 0.4119031607262945
325
+ },
326
+ "tr": {
327
+ "accuracy": 0.3640551445864156,
328
+ "accuracy_stderr": 0.01158473822441319,
329
+ "f1": 0.3603844992856558,
330
+ "f1_stderr": 0.011004967374166683,
331
+ "main_score": 0.3640551445864156
332
+ },
333
+ "ur": {
334
+ "accuracy": 0.25934767989240076,
335
+ "accuracy_stderr": 0.011556400737346494,
336
+ "f1": 0.252074457023531,
337
+ "f1_stderr": 0.00828687176833062,
338
+ "main_score": 0.25934767989240076
339
+ },
340
+ "vi": {
341
+ "accuracy": 0.38799596503026224,
342
+ "accuracy_stderr": 0.012823731186170102,
343
+ "f1": 0.37160233794673125,
344
+ "f1_stderr": 0.013799233781790802,
345
+ "main_score": 0.38799596503026224
346
+ },
347
+ "zh-CN": {
348
+ "accuracy": 0.4624411566913248,
349
+ "accuracy_stderr": 0.01869309179104032,
350
+ "f1": 0.44367480561291905,
351
+ "f1_stderr": 0.01471127926363261,
352
+ "main_score": 0.4624411566913248
353
+ },
354
+ "zh-TW": {
355
+ "accuracy": 0.4230665770006724,
356
+ "accuracy_stderr": 0.015603332261143462,
357
+ "f1": 0.41964222328351397,
358
+ "f1_stderr": 0.013651788714198228,
359
+ "main_score": 0.4230665770006724
360
+ }
361
+ },
362
+ "validation": {
363
+ "af": {
364
+ "accuracy": 0.4180029513034924,
365
+ "accuracy_stderr": 0.010921152256864068,
366
+ "f1": 0.408564524920107,
367
+ "f1_stderr": 0.011580488915745207,
368
+ "main_score": 0.4180029513034924
369
+ },
370
+ "am": {
371
+ "accuracy": 0.22936546974913924,
372
+ "accuracy_stderr": 0.011915475401965652,
373
+ "f1": 0.22443454994948162,
374
+ "f1_stderr": 0.01293675359583084,
375
+ "main_score": 0.22936546974913924
376
+ },
377
+ "ar": {
378
+ "accuracy": 0.2941957697983276,
379
+ "accuracy_stderr": 0.013140667522280231,
380
+ "f1": 0.278025426878666,
381
+ "f1_stderr": 0.01146329357734503,
382
+ "main_score": 0.2941957697983276
383
+ },
384
+ "az": {
385
+ "accuracy": 0.3528283325135268,
386
+ "accuracy_stderr": 0.012346286276438762,
387
+ "f1": 0.3586288453850816,
388
+ "f1_stderr": 0.013126667211852334,
389
+ "main_score": 0.3528283325135268
390
+ },
391
+ "bn": {
392
+ "accuracy": 0.29242498770290204,
393
+ "accuracy_stderr": 0.012778098967926376,
394
+ "f1": 0.2743238187163509,
395
+ "f1_stderr": 0.007196357124921039,
396
+ "main_score": 0.29242498770290204
397
+ },
398
+ "cy": {
399
+ "accuracy": 0.4091982292179046,
400
+ "accuracy_stderr": 0.01750643809327536,
401
+ "f1": 0.3968229515847022,
402
+ "f1_stderr": 0.011600617097504318,
403
+ "main_score": 0.4091982292179046
404
+ },
405
+ "da": {
406
+ "accuracy": 0.4363994097393015,
407
+ "accuracy_stderr": 0.013211089954710703,
408
+ "f1": 0.41855164392134825,
409
+ "f1_stderr": 0.012090836177572879,
410
+ "main_score": 0.4363994097393015
411
+ },
412
+ "de": {
413
+ "accuracy": 0.4300049188391539,
414
+ "accuracy_stderr": 0.01189667797960137,
415
+ "f1": 0.40793611600487506,
416
+ "f1_stderr": 0.014239149206748571,
417
+ "main_score": 0.4300049188391539
418
+ },
419
+ "el": {
420
+ "accuracy": 0.3712739793408756,
421
+ "accuracy_stderr": 0.011689463686097046,
422
+ "f1": 0.3550955737747622,
423
+ "f1_stderr": 0.01024985130519696,
424
+ "main_score": 0.3712739793408756
425
+ },
426
+ "en": {
427
+ "accuracy": 0.6291687161829808,
428
+ "accuracy_stderr": 0.01686014188369135,
429
+ "f1": 0.6127498027362954,
430
+ "f1_stderr": 0.012061423584454146,
431
+ "main_score": 0.6291687161829808
432
+ },
433
+ "es": {
434
+ "accuracy": 0.43580914904082635,
435
+ "accuracy_stderr": 0.012224082295727747,
436
+ "f1": 0.42917002190317477,
437
+ "f1_stderr": 0.011405640175485206,
438
+ "main_score": 0.43580914904082635
439
+ },
440
+ "evaluation_time": 1881.4,
441
+ "fa": {
442
+ "accuracy": 0.3547466797835711,
443
+ "accuracy_stderr": 0.010601895712032338,
444
+ "f1": 0.33945762420706,
445
+ "f1_stderr": 0.009267694956350405,
446
+ "main_score": 0.3547466797835711
447
+ },
448
+ "fi": {
449
+ "accuracy": 0.40078701426463353,
450
+ "accuracy_stderr": 0.011417217998563597,
451
+ "f1": 0.3883449448052677,
452
+ "f1_stderr": 0.012122410583794865,
453
+ "main_score": 0.40078701426463353
454
+ },
455
+ "fr": {
456
+ "accuracy": 0.4429414658140679,
457
+ "accuracy_stderr": 0.015256729395188778,
458
+ "f1": 0.43394278610572457,
459
+ "f1_stderr": 0.014586034300029853,
460
+ "main_score": 0.4429414658140679
461
+ },
462
+ "he": {
463
+ "accuracy": 0.311460895228726,
464
+ "accuracy_stderr": 0.014779126801429157,
465
+ "f1": 0.2995793979884509,
466
+ "f1_stderr": 0.013615081888042758,
467
+ "main_score": 0.311460895228726
468
+ },
469
+ "hi": {
470
+ "accuracy": 0.25961633054599115,
471
+ "accuracy_stderr": 0.007878745912513887,
472
+ "f1": 0.25982460372695954,
473
+ "f1_stderr": 0.0071203448616418506,
474
+ "main_score": 0.25961633054599115
475
+ },
476
+ "hu": {
477
+ "accuracy": 0.3748155435317265,
478
+ "accuracy_stderr": 0.011499989267418064,
479
+ "f1": 0.36610577802929695,
480
+ "f1_stderr": 0.012185955975190215,
481
+ "main_score": 0.3748155435317265
482
+ },
483
+ "hy": {
484
+ "accuracy": 0.2815543531726513,
485
+ "accuracy_stderr": 0.013230122952822734,
486
+ "f1": 0.2770068958000932,
487
+ "f1_stderr": 0.01277048573808052,
488
+ "main_score": 0.2815543531726513
489
+ },
490
+ "id": {
491
+ "accuracy": 0.4424495818986719,
492
+ "accuracy_stderr": 0.011069574031270856,
493
+ "f1": 0.43510898494968553,
494
+ "f1_stderr": 0.0076463739176352115,
495
+ "main_score": 0.4424495818986719
496
+ },
497
+ "is": {
498
+ "accuracy": 0.40157402852926705,
499
+ "accuracy_stderr": 0.015005603921624816,
500
+ "f1": 0.3876853823428391,
501
+ "f1_stderr": 0.01201970040174986,
502
+ "main_score": 0.40157402852926705
503
+ },
504
+ "it": {
505
+ "accuracy": 0.456714215445155,
506
+ "accuracy_stderr": 0.015036288695625667,
507
+ "f1": 0.463502133111645,
508
+ "f1_stderr": 0.015187785561573016,
509
+ "main_score": 0.456714215445155
510
+ },
511
+ "ja": {
512
+ "accuracy": 0.4479094933595672,
513
+ "accuracy_stderr": 0.014653187990596124,
514
+ "f1": 0.4280973013012505,
515
+ "f1_stderr": 0.01655707191595756,
516
+ "main_score": 0.4479094933595672
517
+ },
518
+ "jv": {
519
+ "accuracy": 0.384505656665027,
520
+ "accuracy_stderr": 0.012104842420943921,
521
+ "f1": 0.380167978724446,
522
+ "f1_stderr": 0.008009104534033291,
523
+ "main_score": 0.384505656665027
524
+ },
525
+ "ka": {
526
+ "accuracy": 0.24835218888342353,
527
+ "accuracy_stderr": 0.013181112319925865,
528
+ "f1": 0.24709500138710574,
529
+ "f1_stderr": 0.01059101293402143,
530
+ "main_score": 0.24835218888342353
531
+ },
532
+ "km": {
533
+ "accuracy": 0.2742252828332513,
534
+ "accuracy_stderr": 0.011534127186569546,
535
+ "f1": 0.2602068523353439,
536
+ "f1_stderr": 0.012575174825235999,
537
+ "main_score": 0.2742252828332513
538
+ },
539
+ "kn": {
540
+ "accuracy": 0.2259222823413674,
541
+ "accuracy_stderr": 0.014722127495659583,
542
+ "f1": 0.21716530479138849,
543
+ "f1_stderr": 0.010961787165686507,
544
+ "main_score": 0.2259222823413674
545
+ },
546
+ "ko": {
547
+ "accuracy": 0.3673389080177078,
548
+ "accuracy_stderr": 0.013767126952905771,
549
+ "f1": 0.37221618799085243,
550
+ "f1_stderr": 0.014709606757115768,
551
+ "main_score": 0.3673389080177078
552
+ },
553
+ "lv": {
554
+ "accuracy": 0.4103295622233153,
555
+ "accuracy_stderr": 0.016045974090390387,
556
+ "f1": 0.40406596723582255,
557
+ "f1_stderr": 0.0069132258926177265,
558
+ "main_score": 0.4103295622233153
559
+ },
560
+ "ml": {
561
+ "accuracy": 0.24200688637481554,
562
+ "accuracy_stderr": 0.011347496474072452,
563
+ "f1": 0.23514331789309012,
564
+ "f1_stderr": 0.01204913680015678,
565
+ "main_score": 0.24200688637481554
566
+ },
567
+ "mn": {
568
+ "accuracy": 0.29070339399901624,
569
+ "accuracy_stderr": 0.01309926671195602,
570
+ "f1": 0.29527156314146025,
571
+ "f1_stderr": 0.010723196469553449,
572
+ "main_score": 0.29070339399901624
573
+ },
574
+ "ms": {
575
+ "accuracy": 0.42287260206591243,
576
+ "accuracy_stderr": 0.019597793502122884,
577
+ "f1": 0.41872839411817814,
578
+ "f1_stderr": 0.014555274766542817,
579
+ "main_score": 0.42287260206591243
580
+ },
581
+ "my": {
582
+ "accuracy": 0.24195769798327596,
583
+ "accuracy_stderr": 0.00958467079934785,
584
+ "f1": 0.23386230455157248,
585
+ "f1_stderr": 0.010002828286172527,
586
+ "main_score": 0.24195769798327596
587
+ },
588
+ "nb": {
589
+ "accuracy": 0.40196753566158383,
590
+ "accuracy_stderr": 0.014139930174690087,
591
+ "f1": 0.3918493283614314,
592
+ "f1_stderr": 0.013552558677777658,
593
+ "main_score": 0.40196753566158383
594
+ },
595
+ "nl": {
596
+ "accuracy": 0.4228726020659125,
597
+ "accuracy_stderr": 0.016822214166599285,
598
+ "f1": 0.4111008537872992,
599
+ "f1_stderr": 0.01409045918592202,
600
+ "main_score": 0.4228726020659125
601
+ },
602
+ "pl": {
603
+ "accuracy": 0.4145597638957206,
604
+ "accuracy_stderr": 0.020333079371071323,
605
+ "f1": 0.39761508941215074,
606
+ "f1_stderr": 0.015094892952504711,
607
+ "main_score": 0.4145597638957206
608
+ },
609
+ "pt": {
610
+ "accuracy": 0.45209050664043293,
611
+ "accuracy_stderr": 0.01835254418074865,
612
+ "f1": 0.4487810416996396,
613
+ "f1_stderr": 0.01196627715625292,
614
+ "main_score": 0.45209050664043293
615
+ },
616
+ "ro": {
617
+ "accuracy": 0.41559272011805215,
618
+ "accuracy_stderr": 0.008131571624077545,
619
+ "f1": 0.40039259678785666,
620
+ "f1_stderr": 0.00515152110757374,
621
+ "main_score": 0.41559272011805215
622
+ },
623
+ "ru": {
624
+ "accuracy": 0.35622233152975896,
625
+ "accuracy_stderr": 0.021799726881596905,
626
+ "f1": 0.34781156799018975,
627
+ "f1_stderr": 0.015093828682005378,
628
+ "main_score": 0.35622233152975896
629
+ },
630
+ "sl": {
631
+ "accuracy": 0.4014264633546484,
632
+ "accuracy_stderr": 0.012750044779015257,
633
+ "f1": 0.3891333558812916,
634
+ "f1_stderr": 0.011224536409706653,
635
+ "main_score": 0.4014264633546484
636
+ },
637
+ "sq": {
638
+ "accuracy": 0.43580914904082635,
639
+ "accuracy_stderr": 0.0145750441613665,
640
+ "f1": 0.4232383304393984,
641
+ "f1_stderr": 0.009598219357928728,
642
+ "main_score": 0.43580914904082635
643
+ },
644
+ "sv": {
645
+ "accuracy": 0.43433349729463844,
646
+ "accuracy_stderr": 0.012787089799130798,
647
+ "f1": 0.4239334259272507,
648
+ "f1_stderr": 0.010594787809411465,
649
+ "main_score": 0.43433349729463844
650
+ },
651
+ "sw": {
652
+ "accuracy": 0.4067879980324644,
653
+ "accuracy_stderr": 0.01684801143160469,
654
+ "f1": 0.3915460778697146,
655
+ "f1_stderr": 0.012876417288175717,
656
+ "main_score": 0.4067879980324644
657
+ },
658
+ "ta": {
659
+ "accuracy": 0.2374815543531726,
660
+ "accuracy_stderr": 0.007291836875634601,
661
+ "f1": 0.23306972516495983,
662
+ "f1_stderr": 0.006823162268817619,
663
+ "main_score": 0.2374815543531726
664
+ },
665
+ "te": {
666
+ "accuracy": 0.24535169699950812,
667
+ "accuracy_stderr": 0.010303295016847668,
668
+ "f1": 0.23951706003507978,
669
+ "f1_stderr": 0.012099393447774153,
670
+ "main_score": 0.24535169699950812
671
+ },
672
+ "th": {
673
+ "accuracy": 0.3489424495818987,
674
+ "accuracy_stderr": 0.011552886222641766,
675
+ "f1": 0.33677383997436106,
676
+ "f1_stderr": 0.006869691313087882,
677
+ "main_score": 0.3489424495818987
678
+ },
679
+ "tl": {
680
+ "accuracy": 0.4083620265617315,
681
+ "accuracy_stderr": 0.014225228406157971,
682
+ "f1": 0.3990401121375912,
683
+ "f1_stderr": 0.014254629179738524,
684
+ "main_score": 0.4083620265617315
685
+ },
686
+ "tr": {
687
+ "accuracy": 0.361829808165273,
688
+ "accuracy_stderr": 0.018463935842145066,
689
+ "f1": 0.3560425832290258,
690
+ "f1_stderr": 0.015144371708336025,
691
+ "main_score": 0.361829808165273
692
+ },
693
+ "ur": {
694
+ "accuracy": 0.25853418593212,
695
+ "accuracy_stderr": 0.011733674034744004,
696
+ "f1": 0.25059229515932524,
697
+ "f1_stderr": 0.010461858886336843,
698
+ "main_score": 0.25853418593212
699
+ },
700
+ "vi": {
701
+ "accuracy": 0.38180029513034924,
702
+ "accuracy_stderr": 0.014392627591048972,
703
+ "f1": 0.35555792018834453,
704
+ "f1_stderr": 0.014687028346239177,
705
+ "main_score": 0.38180029513034924
706
+ },
707
+ "zh-CN": {
708
+ "accuracy": 0.46173143138219375,
709
+ "accuracy_stderr": 0.01566177233818183,
710
+ "f1": 0.45269945997397354,
711
+ "f1_stderr": 0.012845282312811623,
712
+ "main_score": 0.46173143138219375
713
+ },
714
+ "zh-TW": {
715
+ "accuracy": 0.4192326610919824,
716
+ "accuracy_stderr": 0.01305866106572301,
717
+ "f1": 0.42394738901751217,
718
+ "f1_stderr": 0.012561702136094713,
719
+ "main_score": 0.4192326610919824
720
+ }
721
+ },
722
+ "dataset_version": null,
723
+ "mteb_version": "0.0.2"
724
+ }
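
The classification result files in this commit all share the same nested layout: top-level split keys ("test", "validation") map to per-language blocks of accuracy/f1/main_score, with "evaluation_time", "dataset_version", and "mteb_version" sitting alongside them. A minimal sketch of pulling the per-language main_score out of one of these JSONs with only the standard library; the file path is just an example and not part of this commit:

```python
import json

# Example path; any of the per-task classification result files has this shape.
with open("evaluation/mteb/MassiveIntentClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    # Skip scalar siblings such as "evaluation_time" that live next to the languages.
    scores = {
        lang: metrics["main_score"]
        for lang, metrics in results.get(split, {}).items()
        if isinstance(metrics, dict) and "main_score" in metrics
    }
    top5 = sorted(scores.items(), key=lambda kv: kv[1], reverse=True)[:5]
    print(split, top5)
```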
evaluation/mteb/MassiveScenarioClassification.json ADDED
@@ -0,0 +1,724 @@
1
+ {
2
+ "test": {
3
+ "af": {
4
+ "accuracy": 0.43248150638870203,
5
+ "accuracy_stderr": 0.01914237726930163,
6
+ "f1": 0.40924230769590786,
7
+ "f1_stderr": 0.017594605316604825,
8
+ "main_score": 0.43248150638870203
9
+ },
10
+ "am": {
11
+ "accuracy": 0.2530262273032952,
12
+ "accuracy_stderr": 0.0171025727714938,
13
+ "f1": 0.24937105830264067,
14
+ "f1_stderr": 0.014061996186754674,
15
+ "main_score": 0.2530262273032952
16
+ },
17
+ "ar": {
18
+ "accuracy": 0.3207128446536651,
19
+ "accuracy_stderr": 0.013430382612278666,
20
+ "f1": 0.3180245816594883,
21
+ "f1_stderr": 0.01399700671221373,
22
+ "main_score": 0.3207128446536651
23
+ },
24
+ "az": {
25
+ "accuracy": 0.3668123739071957,
26
+ "accuracy_stderr": 0.017293355231396697,
27
+ "f1": 0.3637219042508338,
28
+ "f1_stderr": 0.013273690138493005,
29
+ "main_score": 0.3668123739071957
30
+ },
31
+ "bn": {
32
+ "accuracy": 0.2956624075319435,
33
+ "accuracy_stderr": 0.01956084843583552,
34
+ "f1": 0.2838604205636276,
35
+ "f1_stderr": 0.01649525524185898,
36
+ "main_score": 0.2956624075319435
37
+ },
38
+ "cy": {
39
+ "accuracy": 0.421049092131809,
40
+ "accuracy_stderr": 0.01842488912674414,
41
+ "f1": 0.38926150886991295,
42
+ "f1_stderr": 0.013736606444137845,
43
+ "main_score": 0.421049092131809
44
+ },
45
+ "da": {
46
+ "accuracy": 0.4544384667114997,
47
+ "accuracy_stderr": 0.018828807973369888,
48
+ "f1": 0.42578252395460003,
49
+ "f1_stderr": 0.01878751493281785,
50
+ "main_score": 0.4544384667114997
51
+ },
52
+ "de": {
53
+ "accuracy": 0.43211163416274373,
54
+ "accuracy_stderr": 0.015219386970045799,
55
+ "f1": 0.41044658583047894,
56
+ "f1_stderr": 0.012995583154456956,
57
+ "main_score": 0.43211163416274373
58
+ },
59
+ "el": {
60
+ "accuracy": 0.3650302622730329,
61
+ "accuracy_stderr": 0.014415725865475081,
62
+ "f1": 0.3449785095312759,
63
+ "f1_stderr": 0.015562024871571323,
64
+ "main_score": 0.3650302622730329
65
+ },
66
+ "en": {
67
+ "accuracy": 0.6973772696704774,
68
+ "accuracy_stderr": 0.011262031912799892,
69
+ "f1": 0.6921759502909044,
70
+ "f1_stderr": 0.013799487672182479,
71
+ "main_score": 0.6973772696704774
72
+ },
73
+ "es": {
74
+ "accuracy": 0.44078681909885675,
75
+ "accuracy_stderr": 0.018595701586506804,
76
+ "f1": 0.4305914426901129,
77
+ "f1_stderr": 0.016625685820030444,
78
+ "main_score": 0.44078681909885675
79
+ },
80
+ "evaluation_time": 1815.94,
81
+ "fa": {
82
+ "accuracy": 0.32612642905178213,
83
+ "accuracy_stderr": 0.012812264412745333,
84
+ "f1": 0.3202463177462754,
85
+ "f1_stderr": 0.012798802292501087,
86
+ "main_score": 0.32612642905178213
87
+ },
88
+ "fi": {
89
+ "accuracy": 0.40356422326832553,
90
+ "accuracy_stderr": 0.014078935277749945,
91
+ "f1": 0.3813642481807678,
92
+ "f1_stderr": 0.012692369056549692,
93
+ "main_score": 0.40356422326832553
94
+ },
95
+ "fr": {
96
+ "accuracy": 0.4506724949562878,
97
+ "accuracy_stderr": 0.015178175408214766,
98
+ "f1": 0.4319827608343738,
99
+ "f1_stderr": 0.014020710010605711,
100
+ "main_score": 0.4506724949562878
101
+ },
102
+ "he": {
103
+ "accuracy": 0.3217888365837256,
104
+ "accuracy_stderr": 0.02418084644697299,
105
+ "f1": 0.29979761884698775,
106
+ "f1_stderr": 0.020294571210800923,
107
+ "main_score": 0.3217888365837256
108
+ },
109
+ "hi": {
110
+ "accuracy": 0.26903160726294556,
111
+ "accuracy_stderr": 0.02497685106922395,
112
+ "f1": 0.25833010434083364,
113
+ "f1_stderr": 0.021329106099270956,
114
+ "main_score": 0.26903160726294556
115
+ },
116
+ "hu": {
117
+ "accuracy": 0.4037995965030262,
118
+ "accuracy_stderr": 0.020340860350433913,
119
+ "f1": 0.37931343552928826,
120
+ "f1_stderr": 0.01617066924214891,
121
+ "main_score": 0.4037995965030262
122
+ },
123
+ "hy": {
124
+ "accuracy": 0.28375924680564896,
125
+ "accuracy_stderr": 0.02244735882940363,
126
+ "f1": 0.2696255693013172,
127
+ "f1_stderr": 0.017311893310356832,
128
+ "main_score": 0.28375924680564896
129
+ },
130
+ "id": {
131
+ "accuracy": 0.44361129791526566,
132
+ "accuracy_stderr": 0.022926086894172665,
133
+ "f1": 0.4354445012295126,
134
+ "f1_stderr": 0.020284381381570574,
135
+ "main_score": 0.44361129791526566
136
+ },
137
+ "is": {
138
+ "accuracy": 0.39290517821116344,
139
+ "accuracy_stderr": 0.02370210834656748,
140
+ "f1": 0.3726982052174147,
141
+ "f1_stderr": 0.019988763380559,
142
+ "main_score": 0.39290517821116344
143
+ },
144
+ "it": {
145
+ "accuracy": 0.46469401479488903,
146
+ "accuracy_stderr": 0.018658554046491128,
147
+ "f1": 0.44060986162841564,
148
+ "f1_stderr": 0.017892736302635378,
149
+ "main_score": 0.46469401479488903
150
+ },
151
+ "ja": {
152
+ "accuracy": 0.46257565568258235,
153
+ "accuracy_stderr": 0.018244361807715694,
154
+ "f1": 0.4562513945675882,
155
+ "f1_stderr": 0.016350188403047698,
156
+ "main_score": 0.46257565568258235
157
+ },
158
+ "jv": {
159
+ "accuracy": 0.41126429051782115,
160
+ "accuracy_stderr": 0.018576843941362883,
161
+ "f1": 0.3954392378396527,
162
+ "f1_stderr": 0.011255653042251292,
163
+ "main_score": 0.41126429051782115
164
+ },
165
+ "ka": {
166
+ "accuracy": 0.24727639542703428,
167
+ "accuracy_stderr": 0.018326690606661058,
168
+ "f1": 0.23337743140804484,
169
+ "f1_stderr": 0.011940429119171217,
170
+ "main_score": 0.24727639542703428
171
+ },
172
+ "km": {
173
+ "accuracy": 0.2974108944182918,
174
+ "accuracy_stderr": 0.013625492982156541,
175
+ "f1": 0.2757087619008375,
176
+ "f1_stderr": 0.01055451354659993,
177
+ "main_score": 0.2974108944182918
178
+ },
179
+ "kn": {
180
+ "accuracy": 0.23850033624747816,
181
+ "accuracy_stderr": 0.014455552445217143,
182
+ "f1": 0.2286733484540032,
183
+ "f1_stderr": 0.010992305614270776,
184
+ "main_score": 0.23850033624747816
185
+ },
186
+ "ko": {
187
+ "accuracy": 0.3656691324815064,
188
+ "accuracy_stderr": 0.020280629432627,
189
+ "f1": 0.35504081677134564,
190
+ "f1_stderr": 0.019299833634584138,
191
+ "main_score": 0.3656691324815064
192
+ },
193
+ "lv": {
194
+ "accuracy": 0.40928043039677203,
195
+ "accuracy_stderr": 0.017879262085215602,
196
+ "f1": 0.3910858913121125,
197
+ "f1_stderr": 0.015622608383011384,
198
+ "main_score": 0.40928043039677203
199
+ },
200
+ "ml": {
201
+ "accuracy": 0.25527908540685945,
202
+ "accuracy_stderr": 0.012492668582100332,
203
+ "f1": 0.25333391622280477,
204
+ "f1_stderr": 0.011431795353486644,
205
+ "main_score": 0.25527908540685945
206
+ },
207
+ "mn": {
208
+ "accuracy": 0.29105581708137185,
209
+ "accuracy_stderr": 0.02289852732480194,
210
+ "f1": 0.28478235012692815,
211
+ "f1_stderr": 0.0211390543174164,
212
+ "main_score": 0.29105581708137185
213
+ },
214
+ "ms": {
215
+ "accuracy": 0.43786146603900467,
216
+ "accuracy_stderr": 0.02361491677556193,
217
+ "f1": 0.41964014392626703,
218
+ "f1_stderr": 0.016224233488107753,
219
+ "main_score": 0.43786146603900467
220
+ },
221
+ "my": {
222
+ "accuracy": 0.27269670477471414,
223
+ "accuracy_stderr": 0.017084548735816784,
224
+ "f1": 0.26228386764141853,
225
+ "f1_stderr": 0.01770505820877428,
226
+ "main_score": 0.27269670477471414
227
+ },
228
+ "nb": {
229
+ "accuracy": 0.3901815736381977,
230
+ "accuracy_stderr": 0.02356766226099208,
231
+ "f1": 0.37641949339321856,
232
+ "f1_stderr": 0.018189340920191487,
233
+ "main_score": 0.3901815736381977
234
+ },
235
+ "nl": {
236
+ "accuracy": 0.4535978480161399,
237
+ "accuracy_stderr": 0.016327230257174263,
238
+ "f1": 0.426851176096831,
239
+ "f1_stderr": 0.009526114688499471,
240
+ "main_score": 0.4535978480161399
241
+ },
242
+ "pl": {
243
+ "accuracy": 0.41893073301950234,
244
+ "accuracy_stderr": 0.020686982211902172,
245
+ "f1": 0.4088871064261502,
246
+ "f1_stderr": 0.019791742479992352,
247
+ "main_score": 0.41893073301950234
248
+ },
249
+ "pt": {
250
+ "accuracy": 0.45901143241425685,
251
+ "accuracy_stderr": 0.017537457772563485,
252
+ "f1": 0.44496942353920543,
253
+ "f1_stderr": 0.016580297609253208,
254
+ "main_score": 0.45901143241425685
255
+ },
256
+ "ro": {
257
+ "accuracy": 0.44115669132481505,
258
+ "accuracy_stderr": 0.017138728900302158,
259
+ "f1": 0.41953945105870616,
260
+ "f1_stderr": 0.01663750637309216,
261
+ "main_score": 0.44115669132481505
262
+ },
263
+ "ru": {
264
+ "accuracy": 0.3276395427034297,
265
+ "accuracy_stderr": 0.01520582329589761,
266
+ "f1": 0.31436372571600935,
267
+ "f1_stderr": 0.016822070079219324,
268
+ "main_score": 0.3276395427034297
269
+ },
270
+ "sl": {
271
+ "accuracy": 0.40504371217215873,
272
+ "accuracy_stderr": 0.01737927871109968,
273
+ "f1": 0.39322752749628165,
274
+ "f1_stderr": 0.016021377230910933,
275
+ "main_score": 0.40504371217215873
276
+ },
277
+ "sq": {
278
+ "accuracy": 0.4251849361129792,
279
+ "accuracy_stderr": 0.02703155777439191,
280
+ "f1": 0.41413929711846303,
281
+ "f1_stderr": 0.02203846614787482,
282
+ "main_score": 0.4251849361129792
283
+ },
284
+ "sv": {
285
+ "accuracy": 0.42293207800941496,
286
+ "accuracy_stderr": 0.02634507038010069,
287
+ "f1": 0.4050409536806683,
288
+ "f1_stderr": 0.021882375504727304,
289
+ "main_score": 0.42293207800941496
290
+ },
291
+ "sw": {
292
+ "accuracy": 0.42999327505043705,
293
+ "accuracy_stderr": 0.015164574873190428,
294
+ "f1": 0.4104541622497327,
295
+ "f1_stderr": 0.01416304033082228,
296
+ "main_score": 0.42999327505043705
297
+ },
298
+ "ta": {
299
+ "accuracy": 0.2832548755884331,
300
+ "accuracy_stderr": 0.017499864243874726,
301
+ "f1": 0.2727684199556187,
302
+ "f1_stderr": 0.017340547403638454,
303
+ "main_score": 0.2832548755884331
304
+ },
305
+ "te": {
306
+ "accuracy": 0.26593813046402154,
307
+ "accuracy_stderr": 0.021809193915635242,
308
+ "f1": 0.25483878616197586,
309
+ "f1_stderr": 0.019449647389494947,
310
+ "main_score": 0.26593813046402154
311
+ },
312
+ "th": {
313
+ "accuracy": 0.36788836583725626,
314
+ "accuracy_stderr": 0.01545089176597426,
315
+ "f1": 0.34603932909177687,
316
+ "f1_stderr": 0.016869984806312827,
317
+ "main_score": 0.36788836583725626
318
+ },
319
+ "tl": {
320
+ "accuracy": 0.425689307330195,
321
+ "accuracy_stderr": 0.015430059348496856,
322
+ "f1": 0.40924469309079825,
323
+ "f1_stderr": 0.008776200992571783,
324
+ "main_score": 0.425689307330195
325
+ },
326
+ "tr": {
327
+ "accuracy": 0.37094821788836585,
328
+ "accuracy_stderr": 0.022152967877636806,
329
+ "f1": 0.3794962882285716,
330
+ "f1_stderr": 0.0210046248379818,
331
+ "main_score": 0.37094821788836585
332
+ },
333
+ "ur": {
334
+ "accuracy": 0.2883658372562206,
335
+ "accuracy_stderr": 0.02184377077895051,
336
+ "f1": 0.2780655865551234,
337
+ "f1_stderr": 0.021979806560091308,
338
+ "main_score": 0.2883658372562206
339
+ },
340
+ "vi": {
341
+ "accuracy": 0.37357094821788833,
342
+ "accuracy_stderr": 0.015912003760378605,
343
+ "f1": 0.3750791896103816,
344
+ "f1_stderr": 0.013705906683792032,
345
+ "main_score": 0.37357094821788833
346
+ },
347
+ "zh-CN": {
348
+ "accuracy": 0.4937794216543375,
349
+ "accuracy_stderr": 0.014582638723526031,
350
+ "f1": 0.4720421153697707,
351
+ "f1_stderr": 0.014185512249352985,
352
+ "main_score": 0.4937794216543375
353
+ },
354
+ "zh-TW": {
355
+ "accuracy": 0.44421654337592476,
356
+ "accuracy_stderr": 0.027457834005907886,
357
+ "f1": 0.4434741861198931,
358
+ "f1_stderr": 0.02234941824008831,
359
+ "main_score": 0.44421654337592476
360
+ }
361
+ },
362
+ "validation": {
363
+ "af": {
364
+ "accuracy": 0.42297097884899165,
365
+ "accuracy_stderr": 0.01949058592896654,
366
+ "f1": 0.41228412552668264,
367
+ "f1_stderr": 0.02072529581458811,
368
+ "main_score": 0.42297097884899165
369
+ },
370
+ "am": {
371
+ "accuracy": 0.2450565666502705,
372
+ "accuracy_stderr": 0.008479256952982387,
373
+ "f1": 0.24806662079898306,
374
+ "f1_stderr": 0.008780275014937335,
375
+ "main_score": 0.2450565666502705
376
+ },
377
+ "ar": {
378
+ "accuracy": 0.3055582882439744,
379
+ "accuracy_stderr": 0.017670617309841773,
380
+ "f1": 0.3069995212499811,
381
+ "f1_stderr": 0.015362354242331443,
382
+ "main_score": 0.3055582882439744
383
+ },
384
+ "az": {
385
+ "accuracy": 0.3621249385145106,
386
+ "accuracy_stderr": 0.012152220847254775,
387
+ "f1": 0.36357765091456506,
388
+ "f1_stderr": 0.010139140710942176,
389
+ "main_score": 0.3621249385145106
390
+ },
391
+ "bn": {
392
+ "accuracy": 0.2971470732907034,
393
+ "accuracy_stderr": 0.022069587561896618,
394
+ "f1": 0.2882207128496783,
395
+ "f1_stderr": 0.01683479482879926,
396
+ "main_score": 0.2971470732907034
397
+ },
398
+ "cy": {
399
+ "accuracy": 0.4136251844564683,
400
+ "accuracy_stderr": 0.024426828453818297,
401
+ "f1": 0.39296855913661843,
402
+ "f1_stderr": 0.020973943849242817,
403
+ "main_score": 0.4136251844564683
404
+ },
405
+ "da": {
406
+ "accuracy": 0.43807181505164783,
407
+ "accuracy_stderr": 0.017869947001923518,
408
+ "f1": 0.4206737421170841,
409
+ "f1_stderr": 0.016872412523955146,
410
+ "main_score": 0.43807181505164783
411
+ },
412
+ "de": {
413
+ "accuracy": 0.43133300541072306,
414
+ "accuracy_stderr": 0.020020883880127186,
415
+ "f1": 0.416915841608262,
416
+ "f1_stderr": 0.021672651279716317,
417
+ "main_score": 0.43133300541072306
418
+ },
419
+ "el": {
420
+ "accuracy": 0.3577471716674865,
421
+ "accuracy_stderr": 0.01701527354158656,
422
+ "f1": 0.34927355878305144,
423
+ "f1_stderr": 0.016442978728160182,
424
+ "main_score": 0.3577471716674865
425
+ },
426
+ "en": {
427
+ "accuracy": 0.7017707820954255,
428
+ "accuracy_stderr": 0.016159041704017264,
429
+ "f1": 0.6966771799036044,
430
+ "f1_stderr": 0.01608645681525308,
431
+ "main_score": 0.7017707820954255
432
+ },
433
+ "es": {
434
+ "accuracy": 0.43846532218396456,
435
+ "accuracy_stderr": 0.02347394558102388,
436
+ "f1": 0.4343598552554334,
437
+ "f1_stderr": 0.0181860533015881,
438
+ "main_score": 0.43846532218396456
439
+ },
440
+ "evaluation_time": 1332.6,
441
+ "fa": {
442
+ "accuracy": 0.33084112149532713,
443
+ "accuracy_stderr": 0.011130497227832424,
444
+ "f1": 0.33401019078365096,
445
+ "f1_stderr": 0.013062402784378667,
446
+ "main_score": 0.33084112149532713
447
+ },
448
+ "fi": {
449
+ "accuracy": 0.39316281357599603,
450
+ "accuracy_stderr": 0.01045771116792211,
451
+ "f1": 0.3785817393037779,
452
+ "f1_stderr": 0.013819345424261865,
453
+ "main_score": 0.39316281357599603
454
+ },
455
+ "fr": {
456
+ "accuracy": 0.4451549434333497,
457
+ "accuracy_stderr": 0.015595125043219043,
458
+ "f1": 0.4346460544394509,
459
+ "f1_stderr": 0.013779384653720472,
460
+ "main_score": 0.4451549434333497
461
+ },
462
+ "he": {
463
+ "accuracy": 0.3149532710280374,
464
+ "accuracy_stderr": 0.023701506980689756,
465
+ "f1": 0.3011898277187477,
466
+ "f1_stderr": 0.02093714853708861,
467
+ "main_score": 0.3149532710280374
468
+ },
469
+ "hi": {
470
+ "accuracy": 0.26099360550909984,
471
+ "accuracy_stderr": 0.020137465573030665,
472
+ "f1": 0.2551702530489754,
473
+ "f1_stderr": 0.016256501108798407,
474
+ "main_score": 0.26099360550909984
475
+ },
476
+ "hu": {
477
+ "accuracy": 0.3777668470241023,
478
+ "accuracy_stderr": 0.01415737274034313,
479
+ "f1": 0.3636064864884589,
480
+ "f1_stderr": 0.009160637137956562,
481
+ "main_score": 0.3777668470241023
482
+ },
483
+ "hy": {
484
+ "accuracy": 0.28558780127889816,
485
+ "accuracy_stderr": 0.01877837310408312,
486
+ "f1": 0.2772579956450185,
487
+ "f1_stderr": 0.015424825177141353,
488
+ "main_score": 0.28558780127889816
489
+ },
490
+ "id": {
491
+ "accuracy": 0.4339399901623217,
492
+ "accuracy_stderr": 0.014721716629155198,
493
+ "f1": 0.4297916006449869,
494
+ "f1_stderr": 0.01354829266304612,
495
+ "main_score": 0.4339399901623217
496
+ },
497
+ "is": {
498
+ "accuracy": 0.3939498278406296,
499
+ "accuracy_stderr": 0.019989438522709065,
500
+ "f1": 0.37779417546607796,
501
+ "f1_stderr": 0.015353564518631574,
502
+ "main_score": 0.3939498278406296
503
+ },
504
+ "it": {
505
+ "accuracy": 0.45327102803738323,
506
+ "accuracy_stderr": 0.01699848617376043,
507
+ "f1": 0.44153892466033684,
508
+ "f1_stderr": 0.01882228199455719,
509
+ "main_score": 0.45327102803738323
510
+ },
511
+ "ja": {
512
+ "accuracy": 0.4578455484505656,
513
+ "accuracy_stderr": 0.020281847759727148,
514
+ "f1": 0.45729859048271465,
515
+ "f1_stderr": 0.01661016283738532,
516
+ "main_score": 0.4578455484505656
517
+ },
518
+ "jv": {
519
+ "accuracy": 0.39758976881455976,
520
+ "accuracy_stderr": 0.01707347422003101,
521
+ "f1": 0.39358025541686337,
522
+ "f1_stderr": 0.014149347886283038,
523
+ "main_score": 0.39758976881455976
524
+ },
525
+ "ka": {
526
+ "accuracy": 0.2424003935071323,
527
+ "accuracy_stderr": 0.019907387368651303,
528
+ "f1": 0.23256193506176298,
529
+ "f1_stderr": 0.015099183200210618,
530
+ "main_score": 0.2424003935071323
531
+ },
532
+ "km": {
533
+ "accuracy": 0.2993605509099853,
534
+ "accuracy_stderr": 0.013532083217551187,
535
+ "f1": 0.2809200454959333,
536
+ "f1_stderr": 0.009393641610664295,
537
+ "main_score": 0.2993605509099853
538
+ },
539
+ "kn": {
540
+ "accuracy": 0.23177570093457942,
541
+ "accuracy_stderr": 0.018427996145927942,
542
+ "f1": 0.22821426633751796,
543
+ "f1_stderr": 0.017679796969382704,
544
+ "main_score": 0.23177570093457942
545
+ },
546
+ "ko": {
547
+ "accuracy": 0.3586817511067388,
548
+ "accuracy_stderr": 0.021898282999062484,
549
+ "f1": 0.35579471530107626,
550
+ "f1_stderr": 0.019292875338196964,
551
+ "main_score": 0.3586817511067388
552
+ },
553
+ "lv": {
554
+ "accuracy": 0.4004426955238564,
555
+ "accuracy_stderr": 0.014973725234459603,
556
+ "f1": 0.387872887807314,
557
+ "f1_stderr": 0.015108580814192384,
558
+ "main_score": 0.4004426955238564
559
+ },
560
+ "ml": {
561
+ "accuracy": 0.24756517461878996,
562
+ "accuracy_stderr": 0.012515618686269558,
563
+ "f1": 0.24899937888159857,
564
+ "f1_stderr": 0.012555787929717375,
565
+ "main_score": 0.24756517461878996
566
+ },
567
+ "mn": {
568
+ "accuracy": 0.2843580914904083,
569
+ "accuracy_stderr": 0.019142375289924282,
570
+ "f1": 0.2816633850287075,
571
+ "f1_stderr": 0.01723360155398601,
572
+ "main_score": 0.2843580914904083
573
+ },
574
+ "ms": {
575
+ "accuracy": 0.43320216428922764,
576
+ "accuracy_stderr": 0.02159818527886489,
577
+ "f1": 0.4249245664682754,
578
+ "f1_stderr": 0.014510663777917886,
579
+ "main_score": 0.43320216428922764
580
+ },
581
+ "my": {
582
+ "accuracy": 0.26463354648302995,
583
+ "accuracy_stderr": 0.019322963284725016,
584
+ "f1": 0.2608329884167839,
585
+ "f1_stderr": 0.02115978672739665,
586
+ "main_score": 0.26463354648302995
587
+ },
588
+ "nb": {
589
+ "accuracy": 0.3869650762420069,
590
+ "accuracy_stderr": 0.024525284668151657,
591
+ "f1": 0.3814300772740455,
592
+ "f1_stderr": 0.020457575337737684,
593
+ "main_score": 0.3869650762420069
594
+ },
595
+ "nl": {
596
+ "accuracy": 0.4398425971470733,
597
+ "accuracy_stderr": 0.01625719128512154,
598
+ "f1": 0.42260609210168926,
599
+ "f1_stderr": 0.011560492888814633,
600
+ "main_score": 0.4398425971470733
601
+ },
602
+ "pl": {
603
+ "accuracy": 0.40083620265617326,
604
+ "accuracy_stderr": 0.022349561833830214,
605
+ "f1": 0.3991485290358368,
606
+ "f1_stderr": 0.01999404587448531,
607
+ "main_score": 0.40083620265617326
608
+ },
609
+ "pt": {
610
+ "accuracy": 0.45031972454500735,
611
+ "accuracy_stderr": 0.01750477954059943,
612
+ "f1": 0.4437398935443329,
613
+ "f1_stderr": 0.018517944459345348,
614
+ "main_score": 0.45031972454500735
615
+ },
616
+ "ro": {
617
+ "accuracy": 0.43812100344318744,
618
+ "accuracy_stderr": 0.021415938790220652,
619
+ "f1": 0.42362645820764067,
620
+ "f1_stderr": 0.018731210126253738,
621
+ "main_score": 0.43812100344318744
622
+ },
623
+ "ru": {
624
+ "accuracy": 0.33271028037383177,
625
+ "accuracy_stderr": 0.022300521711360505,
626
+ "f1": 0.32468290003396405,
627
+ "f1_stderr": 0.021982683919350685,
628
+ "main_score": 0.33271028037383177
629
+ },
630
+ "sl": {
631
+ "accuracy": 0.3973930152484014,
632
+ "accuracy_stderr": 0.018751618725414546,
633
+ "f1": 0.392100156522409,
634
+ "f1_stderr": 0.014838827675853575,
635
+ "main_score": 0.3973930152484014
636
+ },
637
+ "sq": {
638
+ "accuracy": 0.4194786030496803,
639
+ "accuracy_stderr": 0.02354660171070067,
640
+ "f1": 0.4169388707660075,
641
+ "f1_stderr": 0.017217415251225807,
642
+ "main_score": 0.4194786030496803
643
+ },
644
+ "sv": {
645
+ "accuracy": 0.42543039842597147,
646
+ "accuracy_stderr": 0.021562756889860635,
647
+ "f1": 0.4177387745760652,
648
+ "f1_stderr": 0.019505579966050932,
649
+ "main_score": 0.42543039842597147
650
+ },
651
+ "sw": {
652
+ "accuracy": 0.4136251844564683,
653
+ "accuracy_stderr": 0.012985083231251377,
654
+ "f1": 0.40084113718397746,
655
+ "f1_stderr": 0.00942359858438975,
656
+ "main_score": 0.4136251844564683
657
+ },
658
+ "ta": {
659
+ "accuracy": 0.2627643876045253,
660
+ "accuracy_stderr": 0.014139930174690075,
661
+ "f1": 0.25844837599995923,
662
+ "f1_stderr": 0.012773160336082992,
663
+ "main_score": 0.2627643876045253
664
+ },
665
+ "te": {
666
+ "accuracy": 0.2584358091490408,
667
+ "accuracy_stderr": 0.016944240742103198,
668
+ "f1": 0.2514435908623849,
669
+ "f1_stderr": 0.012722578409134445,
670
+ "main_score": 0.2584358091490408
671
+ },
672
+ "th": {
673
+ "accuracy": 0.35833743236596166,
674
+ "accuracy_stderr": 0.013268562919427823,
675
+ "f1": 0.3419105153327198,
676
+ "f1_stderr": 0.01289016362222908,
677
+ "main_score": 0.35833743236596166
678
+ },
679
+ "tl": {
680
+ "accuracy": 0.4170191834727004,
681
+ "accuracy_stderr": 0.018806183012845677,
682
+ "f1": 0.409812860893571,
683
+ "f1_stderr": 0.014175082125879828,
684
+ "main_score": 0.4170191834727004
685
+ },
686
+ "tr": {
687
+ "accuracy": 0.3648303000491884,
688
+ "accuracy_stderr": 0.024139475649706935,
689
+ "f1": 0.3753352910025087,
690
+ "f1_stderr": 0.020068175204176857,
691
+ "main_score": 0.3648303000491884
692
+ },
693
+ "ur": {
694
+ "accuracy": 0.27668470241023124,
695
+ "accuracy_stderr": 0.016621382716288085,
696
+ "f1": 0.2703604403224913,
697
+ "f1_stderr": 0.018845793770781712,
698
+ "main_score": 0.27668470241023124
699
+ },
700
+ "vi": {
701
+ "accuracy": 0.3775700934579439,
702
+ "accuracy_stderr": 0.017434075812833565,
703
+ "f1": 0.37914946223283774,
704
+ "f1_stderr": 0.016214172379444072,
705
+ "main_score": 0.3775700934579439
706
+ },
707
+ "zh-CN": {
708
+ "accuracy": 0.48888342351205105,
709
+ "accuracy_stderr": 0.014925171646575468,
710
+ "f1": 0.4748460692222126,
711
+ "f1_stderr": 0.017161662351747747,
712
+ "main_score": 0.48888342351205105
713
+ },
714
+ "zh-TW": {
715
+ "accuracy": 0.43866207575012295,
716
+ "accuracy_stderr": 0.02551147284957334,
717
+ "f1": 0.4429342869790666,
718
+ "f1_stderr": 0.020015637537278582,
719
+ "main_score": 0.43866207575012295
720
+ }
721
+ },
722
+ "dataset_version": null,
723
+ "mteb_version": "0.0.2"
724
+ }
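
A rough sketch of how result files like the ones in this folder are typically produced with the `mteb` package (the JSONs record mteb_version 0.0.2). The task list, the stand-in model, and the output folder below are assumptions for illustration, not taken from this commit; `mteb` only requires an object exposing an `encode(list_of_sentences)` method, so a plain SentenceTransformer is used here instead of the actual SGPT encoder:

```python
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Stand-in encoder; the real evaluation used the SGPT checkpoint this repo hosts.
model = SentenceTransformer("average_word_embeddings_komninos")

evaluation = MTEB(tasks=["MassiveScenarioClassification"])
evaluation.run(model, output_folder="evaluation/mteb")
```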
evaluation/mteb/MedrxivClusteringP2P.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 288.36,
4
+ "v_measure": 0.31374938993074253,
5
+ "v_measure_std": 0.011604811334004275
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/MedrxivClusteringS2S.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 42.83,
4
+ "v_measure": 0.26871455379644094,
5
+ "v_measure_std": 0.015129306688366255
6
+ },
7
+ "dataset_version": null,
8
+ "mteb_version": "0.0.2"
9
+ }
evaluation/mteb/MindSmallReranking.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 1925.88,
6
+ "map": 0.30402396942935334,
7
+ "mrr": 0.3142600938803256
8
+ }
9
+ }
evaluation/mteb/NFCorpus.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 69.17,
6
+ "map_at_1": 0.03774,
7
+ "map_at_10": 0.07615,
8
+ "map_at_100": 0.09574,
9
+ "map_at_1000": 0.10711,
10
+ "map_at_3": 0.05754,
11
+ "map_at_5": 0.06666,
12
+ "mrr_at_1": 0.33127,
13
+ "mrr_at_10": 0.40351,
14
+ "mrr_at_100": 0.41144,
15
+ "mrr_at_1000": 0.41202,
16
+ "mrr_at_3": 0.38029,
17
+ "mrr_at_5": 0.3919,
18
+ "ndcg_at_1": 0.31579,
19
+ "ndcg_at_10": 0.22792,
20
+ "ndcg_at_100": 0.21699,
21
+ "ndcg_at_1000": 0.30893,
22
+ "ndcg_at_3": 0.26829,
23
+ "ndcg_at_5": 0.25119,
24
+ "precision_at_1": 0.33127,
25
+ "precision_at_10": 0.16718,
26
+ "precision_at_100": 0.05709,
27
+ "precision_at_1000": 0.01836,
28
+ "precision_at_3": 0.24768,
29
+ "precision_at_5": 0.213,
30
+ "recall_at_1": 0.03774,
31
+ "recall_at_10": 0.10303,
32
+ "recall_at_100": 0.23013,
33
+ "recall_at_1000": 0.54865,
34
+ "recall_at_3": 0.06554,
35
+ "recall_at_5": 0.08087
36
+ }
37
+ }
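
The retrieval result files (NFCorpus, NQ, QuoraRetrieval, SCIDOCS, and the others listed in the file tree) are flat dictionaries of map/mrr/ndcg/precision/recall values at several cutoffs under "test". A small sketch that collects nDCG@10 across whichever of these files are present; the glob pattern is an assumption about where the JSONs live on disk:

```python
import glob
import json

# Only retrieval-style results carry an "ndcg_at_10" key, so other task files are skipped.
for path in sorted(glob.glob("evaluation/mteb/*.json")):
    with open(path) as f:
        test = json.load(f).get("test", {})
    if isinstance(test, dict) and "ndcg_at_10" in test:
        print(f"{path}: nDCG@10 = {test['ndcg_at_10']:.4f}")
```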
evaluation/mteb/NQ.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 1782.69,
6
+ "map_at_1": 0.15621,
7
+ "map_at_10": 0.24519,
8
+ "map_at_100": 0.25586,
9
+ "map_at_1000": 0.25662,
10
+ "map_at_3": 0.21619,
11
+ "map_at_5": 0.23232,
12
+ "mrr_at_1": 0.17497,
13
+ "mrr_at_10": 0.26301,
14
+ "mrr_at_100": 0.27235,
15
+ "mrr_at_1000": 0.27297,
16
+ "mrr_at_3": 0.23561,
17
+ "mrr_at_5": 0.25111,
18
+ "ndcg_at_1": 0.17497,
19
+ "ndcg_at_10": 0.29725,
20
+ "ndcg_at_100": 0.34824,
21
+ "ndcg_at_1000": 0.36907,
22
+ "ndcg_at_3": 0.23946,
23
+ "ndcg_at_5": 0.26739,
24
+ "precision_at_1": 0.17497,
25
+ "precision_at_10": 0.05217,
26
+ "precision_at_100": 0.0081,
27
+ "precision_at_1000": 0.00101,
28
+ "precision_at_3": 0.11114,
29
+ "precision_at_5": 0.08285,
30
+ "recall_at_1": 0.15621,
31
+ "recall_at_10": 0.43999,
32
+ "recall_at_100": 0.67183,
33
+ "recall_at_1000": 0.83174,
34
+ "recall_at_3": 0.2872,
35
+ "recall_at_5": 0.35154
36
+ }
37
+ }
evaluation/mteb/QuoraRetrieval.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 296.55,
6
+ "map_at_1": 0.54717,
7
+ "map_at_10": 0.67514,
8
+ "map_at_100": 0.68484,
9
+ "map_at_1000": 0.68523,
10
+ "map_at_3": 0.64169,
11
+ "map_at_5": 0.66054,
12
+ "mrr_at_1": 0.6246,
13
+ "mrr_at_10": 0.71503,
14
+ "mrr_at_100": 0.71915,
15
+ "mrr_at_1000": 0.71923,
16
+ "mrr_at_3": 0.69468,
17
+ "mrr_at_5": 0.70677,
18
+ "ndcg_at_1": 0.6248,
19
+ "ndcg_at_10": 0.7298,
20
+ "ndcg_at_100": 0.76023,
21
+ "ndcg_at_1000": 0.76512,
22
+ "ndcg_at_3": 0.68138,
23
+ "ndcg_at_5": 0.70458,
24
+ "precision_at_1": 0.6248,
25
+ "precision_at_10": 0.11373,
26
+ "precision_at_100": 0.01437,
27
+ "precision_at_1000": 0.00154,
28
+ "precision_at_3": 0.29623,
29
+ "precision_at_5": 0.19918,
30
+ "recall_at_1": 0.54717,
31
+ "recall_at_10": 0.84745,
32
+ "recall_at_100": 0.96528,
33
+ "recall_at_1000": 0.9939,
34
+ "recall_at_3": 0.71606,
35
+ "recall_at_5": 0.77511
36
+ }
37
+ }
evaluation/mteb/RedditClustering.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "test": {
3
+ "evaluation_time": 483.06,
4
+ "v_measure": 0.4023390747226228,
5
+ "v_measure_std": 0.05592188317124693
6
+ },
7
+ "validation": {
8
+ "evaluation_time": 486.87,
9
+ "v_measure": 0.4023390747226228,
10
+ "v_measure_std": 0.05592188317124693
11
+ },
12
+ "dataset_version": null,
13
+ "mteb_version": "0.0.2"
14
+ }
evaluation/mteb/RedditClusteringP2P.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 1471.4,
6
+ "v_measure": 0.49090518272935624,
7
+ "v_measure_std": 0.11128243444352012
8
+ }
9
+ }
evaluation/mteb/SCIDOCS.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "dataset_version": null,
3
+ "mteb_version": "0.0.2",
4
+ "test": {
5
+ "evaluation_time": 102.16,
6
+ "map_at_1": 0.03028,
7
+ "map_at_10": 0.06968,
8
+ "map_at_100": 0.082,
9
+ "map_at_1000": 0.08432,
10
+ "map_at_3": 0.05307,
11
+ "map_at_5": 0.06099,
12
+ "mrr_at_1": 0.148,
13
+ "mrr_at_10": 0.22425,
14
+ "mrr_at_100": 0.23577,
15
+ "mrr_at_1000": 0.2367,
16
+ "mrr_at_3": 0.20233,
17
+ "mrr_at_5": 0.21318,
18
+ "ndcg_at_1": 0.148,
19
+ "ndcg_at_10": 0.12206,
20
+ "ndcg_at_100": 0.17799,
21
+ "ndcg_at_1000": 0.22891,
22
+ "ndcg_at_3": 0.12128,
23
+ "ndcg_at_5": 0.10212,
24
+ "precision_at_1": 0.148,
25
+ "precision_at_10": 0.0617,
26
+ "precision_at_100": 0.01428,
27
+ "precision_at_1000": 0.00266,
28
+ "precision_at_3": 0.11333,
29
+ "precision_at_5": 0.0874,
30
+ "recall_at_1": 0.03028,
31
+ "recall_at_10": 0.12522,
32
+ "recall_at_100": 0.28975,
33
+ "recall_at_1000": 0.54038,
34
+ "recall_at_3": 0.06913,
35
+ "recall_at_5": 0.08883
36
+ }
37
+ }
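
The CSV added next flattens the per-task JSONs into one (model, task, dataset, language, metric, value) table. A sketch of how such a summary could be assembled; the column names are read off the CSV header below, while the task-type mapping and output path are hypothetical and this is not the script used to generate the file in this commit:

```python
import csv
import glob
import json
import os

MODEL = "SGPT-125M-weightedmean-msmarco-specb-bitfit"

rows = []
for path in sorted(glob.glob("evaluation/mteb/*.json")):
    dataset = os.path.splitext(os.path.basename(path))[0]
    with open(path) as f:
        test = json.load(f).get("test", {})
    if "v_measure" in test:
        # Clustering results are flat, single-language.
        rows.append((MODEL, "Clustering", dataset, "en", "v_measure", test["v_measure"]))
    else:
        # Classification results nest one metrics dict per language.
        for lang, metrics in test.items():
            if isinstance(metrics, dict) and "accuracy" in metrics:
                rows.append((MODEL, "Classification", dataset, lang, "accuracy", metrics["accuracy"]))

with open("summary.csv", "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(["model", "task", "dataset", "language", "metric", "value"])
    writer.writerows(rows)
```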
evaluation/mteb/SGPT-125M-weightedmean-msmarco-specb-bitfit_results.csv ADDED
@@ -0,0 +1,210 @@
1
+ model,task,dataset,language,metric,value
2
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,BitextMining,BUCC,,f1,
3
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,BitextMining,Tatoeba,,f1,
4
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonCounterfactualClassification,en,accuracy,0.6123880597014926
5
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonCounterfactualClassification,de,accuracy,0.5688436830835117
6
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonCounterfactualClassification,en-ext,accuracy,0.5827586206896551
7
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonCounterfactualClassification,ja,accuracy,0.5464668094218414
8
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonPolarityClassification,en,accuracy,0.65401225
9
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,en,accuracy,0.31165999999999994
10
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,de,accuracy,0.2479
11
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,es,accuracy,0.26643999999999995
12
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,fr,accuracy,0.26386000000000004
13
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,ja,accuracy,0.22078000000000003
14
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,AmazonReviewsClassification,zh,accuracy,0.24274
15
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,Banking77Classification,en,accuracy,0.7770454545454545
16
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,EmotionClassification,en,accuracy,0.39075000000000004
17
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,ImdbClassification,en,accuracy,0.586696
18
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,af,accuracy,0.4054808338937458
19
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,am,accuracy,0.2418291862811029
20
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ar,accuracy,0.30134498991257563
21
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,az,accuracy,0.35884330867518494
22
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,bn,accuracy,0.2917283120376597
23
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,cy,accuracy,0.41788836583725625
24
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,da,accuracy,0.44176193678547404
25
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,de,accuracy,0.4207464694014795
26
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,el,accuracy,0.362542030934768
27
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,en,accuracy,0.6140887693342301
28
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,es,accuracy,0.42679892400806996
29
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,fa,accuracy,0.3559179556153329
30
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,fi,accuracy,0.40036987222595827
31
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,fr,accuracy,0.4343981170141224
32
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,he,accuracy,0.3159381304640215
33
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,hi,accuracy,0.27044384667114996
34
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,hu,accuracy,0.38453261600538
35
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,hy,accuracy,0.2797915265635508
36
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,id,accuracy,0.4397108271687963
37
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,is,accuracy,0.40302622730329524
38
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,it,accuracy,0.45474108944182917
39
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ja,accuracy,0.4560860793544048
40
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,jv,accuracy,0.386684599865501
41
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ka,accuracy,0.25652320107599197
42
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,km,accuracy,0.28295225285810355
43
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,kn,accuracy,0.23480161398789506
44
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ko,accuracy,0.3655682582380632
45
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,lv,accuracy,0.4184936112979153
46
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ml,accuracy,0.2490921318090114
47
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,mn,accuracy,0.2986213853396099
48
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ms,accuracy,0.4242098184263618
49
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,my,accuracy,0.25131136516476127
50
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,nb,accuracy,0.3981506388702084
51
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,nl,accuracy,0.4362138533960995
52
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,pl,accuracy,0.4219569603227976
53
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,pt,accuracy,0.4520847343644923
54
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ro,accuracy,0.4180901143241426
55
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ru,accuracy,0.3596839273705447
56
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,sl,accuracy,0.40605245460659045
57
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,sq,accuracy,0.42757229320780094
58
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,sv,accuracy,0.42347007397444514
59
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,sw,accuracy,0.4112306657700067
60
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ta,accuracy,0.24603227975790182
61
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,te,accuracy,0.2503698722259583
62
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,th,accuracy,0.35400134498991254
63
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,tl,accuracy,0.4119031607262945
64
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,tr,accuracy,0.3640551445864156
65
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,ur,accuracy,0.25934767989240076
66
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,vi,accuracy,0.38799596503026224
67
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,zh-CN,accuracy,0.4624411566913248
68
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveIntentClassification,zh-TW,accuracy,0.4230665770006724
69
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,af,accuracy,0.43248150638870203
70
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,am,accuracy,0.2530262273032952
71
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ar,accuracy,0.3207128446536651
72
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,az,accuracy,0.3668123739071957
73
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,bn,accuracy,0.2956624075319435
74
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,cy,accuracy,0.421049092131809
75
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,da,accuracy,0.4544384667114997
76
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,de,accuracy,0.43211163416274373
77
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,el,accuracy,0.3650302622730329
78
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,en,accuracy,0.6973772696704774
79
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,es,accuracy,0.44078681909885675
80
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,fa,accuracy,0.32612642905178213
81
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,fi,accuracy,0.40356422326832553
82
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,fr,accuracy,0.4506724949562878
83
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,he,accuracy,0.3217888365837256
84
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,hi,accuracy,0.26903160726294556
85
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,hu,accuracy,0.4037995965030262
86
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,hy,accuracy,0.28375924680564896
87
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,id,accuracy,0.44361129791526566
88
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,is,accuracy,0.39290517821116344
89
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,it,accuracy,0.46469401479488903
90
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ja,accuracy,0.46257565568258235
91
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,jv,accuracy,0.41126429051782115
92
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ka,accuracy,0.24727639542703428
93
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,km,accuracy,0.2974108944182918
94
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,kn,accuracy,0.23850033624747816
95
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ko,accuracy,0.3656691324815064
96
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,lv,accuracy,0.40928043039677203
97
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ml,accuracy,0.25527908540685945
98
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,mn,accuracy,0.29105581708137185
99
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ms,accuracy,0.43786146603900467
100
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,my,accuracy,0.27269670477471414
101
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,nb,accuracy,0.3901815736381977
102
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,nl,accuracy,0.4535978480161399
103
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,pl,accuracy,0.41893073301950234
104
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,pt,accuracy,0.45901143241425685
105
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ro,accuracy,0.44115669132481505
106
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ru,accuracy,0.3276395427034297
107
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,sl,accuracy,0.40504371217215873
108
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,sq,accuracy,0.4251849361129792
109
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,sv,accuracy,0.42293207800941496
110
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,sw,accuracy,0.42999327505043705
111
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ta,accuracy,0.2832548755884331
112
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,te,accuracy,0.26593813046402154
113
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,th,accuracy,0.36788836583725626
114
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,tl,accuracy,0.425689307330195
115
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,tr,accuracy,0.37094821788836585
116
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,ur,accuracy,0.2883658372562206
117
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,vi,accuracy,0.37357094821788833
118
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,zh-CN,accuracy,0.4937794216543375
119
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MassiveScenarioClassification,zh-TW,accuracy,0.44421654337592476
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,en,accuracy,0.8695622435020519
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,de,accuracy,0.6273034657650043
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,es,accuracy,0.6754503002001334
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,fr,accuracy,0.653523332289383
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,hi,accuracy,0.45371100752958055
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPDomainClassification,th,accuracy,0.5527667269439421
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,en,accuracy,0.6225262197902417
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,de,accuracy,0.4956043956043956
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,es,accuracy,0.4993995997331555
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,fr,accuracy,0.46329470717193855
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,hi,accuracy,0.3220867694514163
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,MTOPIntentClassification,th,accuracy,0.43627486437613017
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,ToxicConversationsClassification,en,accuracy,0.6265799999999999
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,TweetSentimentExtractionClassification,en,accuracy,0.5240803621958121
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,ArxivClusteringP2P,en,v_measure,0.3970858340673288
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,ArxivClusteringS2S,en,v_measure,0.2824284771372105
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,BiorxivClusteringP2P,en,v_measure,0.33632603955439844
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,BiorxivClusteringS2S,en,v_measure,0.27038042665369927
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,MedrxivClusteringP2P,en,v_measure,0.31374938993074253
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,MedrxivClusteringS2S,en,v_measure,0.26871455379644094
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,RedditClustering,en,v_measure,0.4023390747226228
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,RedditClusteringP2P,en,v_measure,0.49090518272935624
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,StackExchangeClustering,en,v_measure,0.5274481093815175
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,StackExchangeClusteringP2P,en,v_measure,0.3265999453562101
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,TwentyNewsgroupsClustering,en,v_measure,0.3212697126747911
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,PairClassification,SprintDuplicateQuestions,en,ap,0.8988577913120002
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,PairClassification,TwitterSemEval2015,en,ap,0.5474680676121269
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,PairClassification,TwitterURLCorpus,en,ap,0.8105760818661524
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Reranking,AskUbuntuDupQuestions,en,map,0.5583700395192394
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Reranking,MindSmallReranking,en,map,0.30402396942935334
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Reranking,SciDocsRR,en,map,0.7133941904192648
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Reranking,StackOverflowDupQuestions,en,map,0.44744984645554653
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,ArguAna,en,ndcg_at_10,0.45425
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,ClimateFEVER,en,ndcg_at_10,0.21858
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,CQADupstackRetrieval,en,ndcg_at_10,0.27248666666666665
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,DBPedia,en,ndcg_at_10,0.22718
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,FEVER,en,ndcg_at_10,0.6045
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,FiQA2018,en,ndcg_at_10,0.21118
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,HotpotQA,en,ndcg_at_10,0.40876
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,MSMARCO,en,ndcg_at_10,0.27975
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,NFCorpus,en,ndcg_at_10,0.22792
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,NQ,en,ndcg_at_10,0.29725
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,QuoraRetrieval,en,ndcg_at_10,0.7298
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,SCIDOCS,en,ndcg_at_10,0.12206
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,SciFact,en,ndcg_at_10,0.56899
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,Touche2020,en,ndcg_at_10,0.22972
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,TRECCOVID,en,ndcg_at_10,0.70302
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,BIOSSES,en,cosine_spearman,0.7520954502580506
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,SICK-R,en,cosine_spearman,0.6592910683118656
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS12,en,cosine_spearman,0.6652980061546658
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS13,en,cosine_spearman,0.7616628863742361
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS14,en,cosine_spearman,0.6904572664009687
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS15,en,cosine_spearman,0.7923677712825851
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS16,en,cosine_spearman,0.7606792422441928
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,ko-ko,cosine_spearman,0.5238601027550566
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,ar-ar,cosine_spearman,0.5561674586076298
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,en-ar,cosine_spearman,0.08209569244801064
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,en-de,cosine_spearman,0.3018181775929109
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,en-en,cosine_spearman,0.8495398260629698
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,en-tr,cosine_spearman,0.010393399782021343
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,es-en,cosine_spearman,0.28776666666659906
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,es-es,cosine_spearman,0.7188444295144646
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,fr-en,cosine_spearman,0.26339466714066445
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,it-en,cosine_spearman,0.20729929404589678
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS17,nl-en,cosine_spearman,0.2505342961279355
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,en,cosine_spearman,0.6566183708171826
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,de,cosine_spearman,0.21987647321429005
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,es,cosine_spearman,0.49811823238907665
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,pl,cosine_spearman,0.23308439517991938
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,tr,cosine_spearman,0.3466348380997687
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,ar,cosine_spearman,0.2818922448944151
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,ru,cosine_spearman,0.09068119621940965
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,zh,cosine_spearman,0.2548511383289232
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,fr,cosine_spearman,0.6766493409568727
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,de-en,cosine_spearman,0.5307135629778896
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,es-en,cosine_spearman,0.49512539047677256
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,it,cosine_spearman,0.4824795739512037
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,pl-en,cosine_spearman,0.36798674894178013
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,zh-en,cosine_spearman,0.2868170719697501
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,es-it,cosine_spearman,0.45782560880405704
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,de-fr,cosine_spearman,0.3296920218281008
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,de-pl,cosine_spearman,0.20447284723752715
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STS22,fr-pl,cosine_spearman,0.6197797868009122
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,STSBenchmark,en,cosine_spearman,0.7533716094627373
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Summarization,SummEval,en,cosine_spearman,0.2890145030911965
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Classification,average,en,accuracy,0.6072305523949799
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Clustering,average,en,v_measure,0.3579315223640289
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,PairClassification,average,en,ap,0.7523006469300931
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Reranking,average,en,map,0.505809511455851
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,Retrieval,average,en,ndcg_at_10,0.37036311111111125
+ SGPT-125M-weightedmean-msmarco-specb-bitfit,STS,average,en,cosine_spearman,0.7341381497369455
evaluation/mteb/SICK-R.json ADDED
@@ -0,0 +1,17 @@
+ {
+ "test": {
+ "cos_sim": {
+ "pearson": 0.7662983928119752,
+ "spearman": 0.6592910683118656
+ },
+ "euclidean": {
+ "pearson": 0.7110290039690963,
+ "spearman": 0.6480076622426653
+ },
+ "evaluation_time": 6.26,
+ "manhattan": {
+ "pearson": 0.708944726230188,
+ "spearman": 0.6475082576033987
+ }
+ }
+ }
evaluation/mteb/STS12.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "test": {
+ "cos_sim": {
+ "pearson": 0.7442679147085554,
+ "spearman": 0.6652980061546658
+ },
+ "euclidean": {
+ "pearson": 0.7487039477408763,
+ "spearman": 0.7063397666902785
+ },
+ "evaluation_time": 5.89,
+ "manhattan": {
+ "pearson": 0.7497015137513088,
+ "spearman": 0.7075951355434326
+ }
+ },
+ "dataset_version": null,
+ "mteb_version": "0.0.2"
+ }
evaluation/mteb/STS13.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "test": {
+ "cos_sim": {
+ "pearson": 0.7562472426599542,
+ "spearman": 0.7616628863742361
+ },
+ "euclidean": {
+ "pearson": 0.763297128081315,
+ "spearman": 0.7719385151966562
+ },
+ "evaluation_time": 3.17,
+ "manhattan": {
+ "pearson": 0.7650363291423257,
+ "spearman": 0.7737081896355399
+ }
+ },
+ "dataset_version": null,
+ "mteb_version": "0.0.2"
+ }