Muennighoff committed
Commit: 04c1a2b
Parent(s): 63c505b

Update README.md (#1)
- Update README.md (e3c8e9ee7b7646d6f87db95ffc44d9d619ea1923)

README.md (CHANGED)
The change touches only the `model-index` metadata block at the top of README.md. Every MTEB result entry gains exactly one new line, a `revision:` field, inserted immediately after the entry's `split:` line and before its `metrics:` list; the surrounding names, configs, splits, metric types, and scores are unchanged context. The hunk headers run from `@@ -15,6 +15,7 @@` to `@@ -4447,6 +4659,7 @@`, so the edited block spans roughly the first 4,450 lines of the old README. The same one-line insertion is repeated for each of the following entries (all names carry the `MTEB ` prefix in the file; the split is `test` everywhere except MSMARCO, which reports on `validation`):

- AmazonCounterfactualClassification (en, de, en-ext, ja)
- AmazonPolarityClassification
- AmazonReviewsClassification (en, de, es, fr, ja, zh)
- ArguAna
- ArxivClusteringP2P, ArxivClusteringS2S
- AskUbuntuDupQuestions
- BIOSSES
- Banking77Classification
- BiorxivClusteringP2P, BiorxivClusteringS2S
- CQADupstackAndroidRetrieval, CQADupstackEnglishRetrieval, CQADupstackGamingRetrieval, CQADupstackGisRetrieval, CQADupstackMathematicaRetrieval, CQADupstackPhysicsRetrieval, CQADupstackProgrammersRetrieval, CQADupstackRetrieval, CQADupstackStatsRetrieval, CQADupstackTexRetrieval, CQADupstackUnixRetrieval, CQADupstackWebmastersRetrieval, CQADupstackWordpressRetrieval
- ClimateFEVER
- DBPedia
- EmotionClassification
- FEVER
- FiQA2018
- HotpotQA
- ImdbClassification
- MSMARCO (validation split)
- MTOPDomainClassification (en, de, es, fr, hi, th)
- MTOPIntentClassification (en, de, es, fr, hi, th)
- MassiveIntentClassification (af, am, ar, az, bn, cy, da, de, el, en, es, fa, fi, fr, he, hi, hu, hy, id, is, it, ja, jv, ka, km, kn, ko, lv, ml, mn, ms, my, nb, nl, pl, pt, ro, ru, sl, sq, sv, sw, ta, te, th, tl, tr, ur, vi, zh-CN, zh-TW)
- MassiveScenarioClassification (same 51 language configs as MassiveIntentClassification)
- MedrxivClusteringP2P, MedrxivClusteringS2S
- MindSmallReranking
- NFCorpus
- NQ
- QuoraRetrieval
- RedditClustering, RedditClusteringP2P
- SCIDOCS
- SICK-R
- STS12, STS13, STS14, STS15, STS16
- STS17 (ko-ko, ar-ar, en-ar, en-de, en-en, en-tr, es-en, es-es, fr-en, it-en, nl-en)
- STS22 (en, de, es, pl, tr, ar, ru, zh, fr, de-en, es-en, it, pl-en, zh-en, es-it, de-fr, de-pl, fr-pl)
- STSBenchmark
- SciDocsRR
- SciFact
- SprintDuplicateQuestions
- StackExchangeClustering, StackExchangeClusteringP2P
- StackOverflowDupQuestions
- SummEval
- TRECCOVID
- Touche2020
- ToxicConversationsClassification
- TweetSentimentExtractionClassification
- TwentyNewsgroupsClustering
- TwitterSemEval2015
- TwitterURLCorpus

The excerpt after this list shows one entry as it reads after the change.
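Rendered as plain YAML, the first updated entry (AmazonCounterfactualClassification (en), README lines 15 to 21 after the change) reads as below. Indentation is approximate, since the diff view does not preserve it; apart from that, every line is taken from the hunk itself.

```yaml
# excerpt from the model-index block of README.md, after this commit
      name: MTEB AmazonCounterfactualClassification (en)
      config: en
      split: test
      revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996  # the single line this commit adds
    metrics:
    - type: accuracy
      value: 61.23880597014926
```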
|
15 |
name: MTEB AmazonCounterfactualClassification (en)
|
16 |
config: en
|
17 |
split: test
|
18 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
19 |
metrics:
|
20 |
- type: accuracy
|
21 |
value: 61.23880597014926
|
|
|
30 |
name: MTEB AmazonCounterfactualClassification (de)
|
31 |
config: de
|
32 |
split: test
|
33 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
34 |
metrics:
|
35 |
- type: accuracy
|
36 |
value: 56.88436830835117
|
|
|
45 |
name: MTEB AmazonCounterfactualClassification (en-ext)
|
46 |
config: en-ext
|
47 |
split: test
|
48 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
49 |
metrics:
|
50 |
- type: accuracy
|
51 |
value: 58.27586206896551
|
|
|
60 |
name: MTEB AmazonCounterfactualClassification (ja)
|
61 |
config: ja
|
62 |
split: test
|
63 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
64 |
metrics:
|
65 |
- type: accuracy
|
66 |
value: 54.64668094218415
|
|
|
75 |
name: MTEB AmazonPolarityClassification
|
76 |
config: default
|
77 |
split: test
|
78 |
+
revision: 80714f8dcf8cefc218ef4f8c5a966dd83f75a0e1
|
79 |
metrics:
|
80 |
- type: accuracy
|
81 |
value: 65.401225
|
|
|
90 |
name: MTEB AmazonReviewsClassification (en)
|
91 |
config: en
|
92 |
split: test
|
93 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
94 |
metrics:
|
95 |
- type: accuracy
|
96 |
value: 31.165999999999993
|
|
|
103 |
name: MTEB AmazonReviewsClassification (de)
|
104 |
config: de
|
105 |
split: test
|
106 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
107 |
metrics:
|
108 |
- type: accuracy
|
109 |
value: 24.79
|
|
|
116 |
name: MTEB AmazonReviewsClassification (es)
|
117 |
config: es
|
118 |
split: test
|
119 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
120 |
metrics:
|
121 |
- type: accuracy
|
122 |
value: 26.643999999999995
|
|
|
129 |
name: MTEB AmazonReviewsClassification (fr)
|
130 |
config: fr
|
131 |
split: test
|
132 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
133 |
metrics:
|
134 |
- type: accuracy
|
135 |
value: 26.386000000000003
|
|
|
142 |
name: MTEB AmazonReviewsClassification (ja)
|
143 |
config: ja
|
144 |
split: test
|
145 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
146 |
metrics:
|
147 |
- type: accuracy
|
148 |
value: 22.078000000000003
|
|
|
155 |
name: MTEB AmazonReviewsClassification (zh)
|
156 |
config: zh
|
157 |
split: test
|
158 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
159 |
metrics:
|
160 |
- type: accuracy
|
161 |
value: 24.274
|
|
|
168 |
name: MTEB ArguAna
|
169 |
config: default
|
170 |
split: test
|
171 |
+
revision: 5b3e3697907184a9b77a3c99ee9ea1a9cbb1e4e3
|
172 |
metrics:
|
173 |
- type: map_at_1
|
174 |
value: 22.404
|
|
|
237 |
name: MTEB ArxivClusteringP2P
|
238 |
config: default
|
239 |
split: test
|
240 |
+
revision: 0bbdb47bcbe3a90093699aefeed338a0f28a7ee8
|
241 |
metrics:
|
242 |
- type: v_measure
|
243 |
value: 39.70858340673288
|
|
|
248 |
name: MTEB ArxivClusteringS2S
|
249 |
config: default
|
250 |
split: test
|
251 |
+
revision: b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3
|
252 |
metrics:
|
253 |
- type: v_measure
|
254 |
value: 28.242847713721048
|
|
|
259 |
name: MTEB AskUbuntuDupQuestions
|
260 |
config: default
|
261 |
split: test
|
262 |
+
revision: 4d853f94cd57d85ec13805aeeac3ae3e5eb4c49c
|
263 |
metrics:
|
264 |
- type: map
|
265 |
value: 55.83700395192393
|
|
|
272 |
name: MTEB BIOSSES
|
273 |
config: default
|
274 |
split: test
|
275 |
+
revision: 9ee918f184421b6bd48b78f6c714d86546106103
|
276 |
metrics:
|
277 |
- type: cos_sim_pearson
|
278 |
value: 79.25366801756223
|
|
|
293 |
name: MTEB Banking77Classification
|
294 |
config: default
|
295 |
split: test
|
296 |
+
revision: 44fa15921b4c889113cc5df03dd4901b49161ab7
|
297 |
metrics:
|
298 |
- type: accuracy
|
299 |
value: 77.70454545454545
|
|
|
306 |
name: MTEB BiorxivClusteringP2P
|
307 |
config: default
|
308 |
split: test
|
309 |
+
revision: 11d0121201d1f1f280e8cc8f3d98fb9c4d9f9c55
|
310 |
metrics:
|
311 |
- type: v_measure
|
312 |
value: 33.63260395543984
|
|
|
317 |
name: MTEB BiorxivClusteringS2S
|
318 |
config: default
|
319 |
split: test
|
320 |
+
revision: c0fab014e1bcb8d3a5e31b2088972a1e01547dc1
|
321 |
metrics:
|
322 |
- type: v_measure
|
323 |
value: 27.038042665369925
|
|
|
328 |
name: MTEB CQADupstackAndroidRetrieval
|
329 |
config: default
|
330 |
split: test
|
331 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
332 |
metrics:
|
333 |
- type: map_at_1
|
334 |
value: 22.139
|
|
|
397 |
name: MTEB CQADupstackEnglishRetrieval
|
398 |
config: default
|
399 |
split: test
|
400 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
401 |
metrics:
|
402 |
- type: map_at_1
|
403 |
value: 20.652
|
|
|
466 |
name: MTEB CQADupstackGamingRetrieval
|
467 |
config: default
|
468 |
split: test
|
469 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
470 |
metrics:
|
471 |
- type: map_at_1
|
472 |
value: 25.180000000000003
|
|
|
535 |
name: MTEB CQADupstackGisRetrieval
|
536 |
config: default
|
537 |
split: test
|
538 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
539 |
metrics:
|
540 |
- type: map_at_1
|
541 |
value: 16.303
|
|
|
604 |
name: MTEB CQADupstackMathematicaRetrieval
|
605 |
config: default
|
606 |
split: test
|
607 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
608 |
metrics:
|
609 |
- type: map_at_1
|
610 |
value: 10.133000000000001
|
|
|
673 |
name: MTEB CQADupstackPhysicsRetrieval
|
674 |
config: default
|
675 |
split: test
|
676 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
677 |
metrics:
|
678 |
- type: map_at_1
|
679 |
value: 19.991999999999997
|
|
|
742 |
name: MTEB CQADupstackProgrammersRetrieval
|
743 |
config: default
|
744 |
split: test
|
745 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
746 |
metrics:
|
747 |
- type: map_at_1
|
748 |
value: 17.896
|
|
|
811 |
name: MTEB CQADupstackRetrieval
|
812 |
config: default
|
813 |
split: test
|
814 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
815 |
metrics:
|
816 |
- type: map_at_1
|
817 |
value: 17.195166666666665
|
|
|
880 |
name: MTEB CQADupstackStatsRetrieval
|
881 |
config: default
|
882 |
split: test
|
883 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
884 |
metrics:
|
885 |
- type: map_at_1
|
886 |
value: 16.779
|
|
|
949 |
name: MTEB CQADupstackTexRetrieval
|
950 |
config: default
|
951 |
split: test
|
952 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
953 |
metrics:
|
954 |
- type: map_at_1
|
955 |
value: 9.279
|
|
|
1018 |
name: MTEB CQADupstackUnixRetrieval
|
1019 |
config: default
|
1020 |
split: test
|
1021 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
1022 |
metrics:
|
1023 |
- type: map_at_1
|
1024 |
value: 16.36
|
|
|
1087 |
name: MTEB CQADupstackWebmastersRetrieval
|
1088 |
config: default
|
1089 |
split: test
|
1090 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
1091 |
metrics:
|
1092 |
- type: map_at_1
|
1093 |
value: 17.39
|
|
|
1156 |
name: MTEB CQADupstackWordpressRetrieval
|
1157 |
config: default
|
1158 |
split: test
|
1159 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
1160 |
metrics:
|
1161 |
- type: map_at_1
|
1162 |
value: 14.238999999999999
|
|
|
1225 |
name: MTEB ClimateFEVER
|
1226 |
config: default
|
1227 |
split: test
|
1228 |
+
revision: 392b78eb68c07badcd7c2cd8f39af108375dfcce
|
1229 |
metrics:
|
1230 |
- type: map_at_1
|
1231 |
value: 8.828
|
|
|
1294 |
name: MTEB DBPedia
|
1295 |
config: default
|
1296 |
split: test
|
1297 |
+
revision: f097057d03ed98220bc7309ddb10b71a54d667d6
|
1298 |
metrics:
|
1299 |
- type: map_at_1
|
1300 |
value: 5.586
|
|
|
1363 |
name: MTEB EmotionClassification
|
1364 |
config: default
|
1365 |
split: test
|
1366 |
+
revision: 829147f8f75a25f005913200eb5ed41fae320aa1
|
1367 |
metrics:
|
1368 |
- type: accuracy
|
1369 |
value: 39.075
|
|
|
1376 |
name: MTEB FEVER
|
1377 |
config: default
|
1378 |
split: test
|
1379 |
+
revision: 1429cf27e393599b8b359b9b72c666f96b2525f9
|
1380 |
metrics:
|
1381 |
- type: map_at_1
|
1382 |
value: 43.519999999999996
|
|
|
1445 |
name: MTEB FiQA2018
|
1446 |
config: default
|
1447 |
split: test
|
1448 |
+
revision: 41b686a7f28c59bcaaa5791efd47c67c8ebe28be
|
1449 |
metrics:
|
1450 |
- type: map_at_1
|
1451 |
value: 9.549000000000001
|
|
|
1514 |
name: MTEB HotpotQA
|
1515 |
config: default
|
1516 |
split: test
|
1517 |
+
revision: 766870b35a1b9ca65e67a0d1913899973551fc6c
|
1518 |
metrics:
|
1519 |
- type: map_at_1
|
1520 |
value: 25.544
|
|
|
1583 |
name: MTEB ImdbClassification
|
1584 |
config: default
|
1585 |
split: test
|
1586 |
+
revision: 8d743909f834c38949e8323a8a6ce8721ea6c7f4
|
1587 |
metrics:
|
1588 |
- type: accuracy
|
1589 |
value: 58.6696
|
|
|
1598 |
name: MTEB MSMARCO
|
1599 |
config: default
|
1600 |
split: validation
|
1601 |
+
revision: e6838a846e2408f22cf5cc337ebc83e0bcf77849
|
1602 |
metrics:
|
1603 |
- type: map_at_1
|
1604 |
value: 14.442
|
|
|
1667 |
name: MTEB MTOPDomainClassification (en)
|
1668 |
config: en
|
1669 |
split: test
|
1670 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1671 |
metrics:
|
1672 |
- type: accuracy
|
1673 |
value: 86.95622435020519
|
|
|
1680 |
name: MTEB MTOPDomainClassification (de)
|
1681 |
config: de
|
1682 |
split: test
|
1683 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1684 |
metrics:
|
1685 |
- type: accuracy
|
1686 |
value: 62.73034657650043
|
|
|
1693 |
name: MTEB MTOPDomainClassification (es)
|
1694 |
config: es
|
1695 |
split: test
|
1696 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1697 |
metrics:
|
1698 |
- type: accuracy
|
1699 |
value: 67.54503002001334
|
|
|
1706 |
name: MTEB MTOPDomainClassification (fr)
|
1707 |
config: fr
|
1708 |
split: test
|
1709 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1710 |
metrics:
|
1711 |
- type: accuracy
|
1712 |
value: 65.35233322893829
|
|
|
1719 |
name: MTEB MTOPDomainClassification (hi)
|
1720 |
config: hi
|
1721 |
split: test
|
1722 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1723 |
metrics:
|
1724 |
- type: accuracy
|
1725 |
value: 45.37110075295806
|
|
|
1732 |
name: MTEB MTOPDomainClassification (th)
|
1733 |
config: th
|
1734 |
split: test
|
1735 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1736 |
metrics:
|
1737 |
- type: accuracy
|
1738 |
value: 55.276672694394215
|
|
|
1745 |
name: MTEB MTOPIntentClassification (en)
|
1746 |
config: en
|
1747 |
split: test
|
1748 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1749 |
metrics:
|
1750 |
- type: accuracy
|
1751 |
value: 62.25262197902417
|
|
|
1758 |
name: MTEB MTOPIntentClassification (de)
|
1759 |
config: de
|
1760 |
split: test
|
1761 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1762 |
metrics:
|
1763 |
- type: accuracy
|
1764 |
value: 49.56043956043956
|
|
|
1771 |
name: MTEB MTOPIntentClassification (es)
|
1772 |
config: es
|
1773 |
split: test
|
1774 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1775 |
metrics:
|
1776 |
- type: accuracy
|
1777 |
value: 49.93995997331555
|
|
|
1784 |
name: MTEB MTOPIntentClassification (fr)
|
1785 |
config: fr
|
1786 |
split: test
|
1787 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1788 |
metrics:
|
1789 |
- type: accuracy
|
1790 |
value: 46.32947071719386
|
|
|
1797 |
name: MTEB MTOPIntentClassification (hi)
|
1798 |
config: hi
|
1799 |
split: test
|
1800 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1801 |
metrics:
|
1802 |
- type: accuracy
|
1803 |
value: 32.208676945141626
|
|
|
1810 |
name: MTEB MTOPIntentClassification (th)
|
1811 |
config: th
|
1812 |
split: test
|
1813 |
+
revision: 6299947a7777084cc2d4b64235bf7190381ce755
|
1814 |
metrics:
|
1815 |
- type: accuracy
|
1816 |
value: 43.627486437613015
|
|
|
1823 |
name: MTEB MassiveIntentClassification (af)
config: af
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 40.548083389374575

name: MTEB MassiveIntentClassification (am)
config: am
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 24.18291862811029

name: MTEB MassiveIntentClassification (ar)
config: ar
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 30.134498991257562

name: MTEB MassiveIntentClassification (az)
config: az
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 35.88433086751849

name: MTEB MassiveIntentClassification (bn)
config: bn
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 29.17283120376597

name: MTEB MassiveIntentClassification (cy)
config: cy
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 41.788836583725626

name: MTEB MassiveIntentClassification (da)
config: da
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 44.176193678547406

name: MTEB MassiveIntentClassification (de)
config: de
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.07464694014795

name: MTEB MassiveIntentClassification (el)
config: el
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 36.254203093476804

name: MTEB MassiveIntentClassification (en)
config: en
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 61.40887693342301

name: MTEB MassiveIntentClassification (es)
config: es
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.679892400807

name: MTEB MassiveIntentClassification (fa)
config: fa
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 35.59179556153329

name: MTEB MassiveIntentClassification (fi)
config: fi
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 40.036987222595826

name: MTEB MassiveIntentClassification (fr)
config: fr
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 43.43981170141224

name: MTEB MassiveIntentClassification (he)
config: he
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 31.593813046402154

name: MTEB MassiveIntentClassification (hi)
config: hi
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 27.044384667114997

name: MTEB MassiveIntentClassification (hu)
config: hu
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 38.453261600538

name: MTEB MassiveIntentClassification (hy)
config: hy
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 27.979152656355076

name: MTEB MassiveIntentClassification (id)
config: id
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 43.97108271687963

name: MTEB MassiveIntentClassification (is)
config: is
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 40.302622730329524

name: MTEB MassiveIntentClassification (it)
config: it
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 45.474108944182916

name: MTEB MassiveIntentClassification (ja)
config: ja
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 45.60860793544048

name: MTEB MassiveIntentClassification (jv)
config: jv
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 38.668459986550104

name: MTEB MassiveIntentClassification (ka)
config: ka
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 25.6523201075992

name: MTEB MassiveIntentClassification (km)
config: km
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 28.295225285810353

name: MTEB MassiveIntentClassification (kn)
config: kn
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 23.480161398789505

name: MTEB MassiveIntentClassification (ko)
config: ko
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 36.55682582380632

name: MTEB MassiveIntentClassification (lv)
config: lv
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 41.84936112979153

name: MTEB MassiveIntentClassification (ml)
config: ml
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 24.90921318090114

name: MTEB MassiveIntentClassification (mn)
config: mn
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 29.86213853396099

name: MTEB MassiveIntentClassification (ms)
config: ms
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.42098184263618

name: MTEB MassiveIntentClassification (my)
config: my
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 25.131136516476126

name: MTEB MassiveIntentClassification (nb)
config: nb
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 39.81506388702084

name: MTEB MassiveIntentClassification (nl)
config: nl
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 43.62138533960995

name: MTEB MassiveIntentClassification (pl)
config: pl
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.19569603227976

name: MTEB MassiveIntentClassification (pt)
config: pt
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 45.20847343644923

name: MTEB MassiveIntentClassification (ro)
config: ro
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 41.80901143241426

name: MTEB MassiveIntentClassification (ru)
config: ru
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 35.96839273705447

name: MTEB MassiveIntentClassification (sl)
config: sl
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 40.60524546065905

name: MTEB MassiveIntentClassification (sq)
config: sq
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.75722932078009

name: MTEB MassiveIntentClassification (sv)
config: sv
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.347007397444514

name: MTEB MassiveIntentClassification (sw)
config: sw
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 41.12306657700067

name: MTEB MassiveIntentClassification (ta)
config: ta
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 24.603227975790183

name: MTEB MassiveIntentClassification (te)
config: te
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 25.03698722259583

name: MTEB MassiveIntentClassification (th)
config: th
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 35.40013449899126

name: MTEB MassiveIntentClassification (tl)
config: tl
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 41.19031607262945

name: MTEB MassiveIntentClassification (tr)
config: tr
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 36.405514458641555

name: MTEB MassiveIntentClassification (ur)
config: ur
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 25.934767989240076

name: MTEB MassiveIntentClassification (vi)
config: vi
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 38.79959650302622

name: MTEB MassiveIntentClassification (zh-CN)
config: zh-CN
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 46.244115669132476

name: MTEB MassiveIntentClassification (zh-TW)
config: zh-TW
split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
metrics:
- type: accuracy
value: 42.30665770006724

name: MTEB MassiveScenarioClassification (af)
config: af
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 43.2481506388702

name: MTEB MassiveScenarioClassification (am)
config: am
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 25.30262273032952

name: MTEB MassiveScenarioClassification (ar)
config: ar
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 32.07128446536651

name: MTEB MassiveScenarioClassification (az)
config: az
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 36.681237390719566

name: MTEB MassiveScenarioClassification (bn)
config: bn
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 29.56624075319435

name: MTEB MassiveScenarioClassification (cy)
config: cy
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 42.1049092131809

name: MTEB MassiveScenarioClassification (da)
config: da
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 45.44384667114997

name: MTEB MassiveScenarioClassification (de)
config: de
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 43.211163416274374

name: MTEB MassiveScenarioClassification (el)
config: el
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 36.503026227303295

name: MTEB MassiveScenarioClassification (en)
config: en
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 69.73772696704773

name: MTEB MassiveScenarioClassification (es)
config: es
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 44.078681909885674

name: MTEB MassiveScenarioClassification (fa)
config: fa
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 32.61264290517821

name: MTEB MassiveScenarioClassification (fi)
config: fi
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 40.35642232683255

name: MTEB MassiveScenarioClassification (fr)
config: fr
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 45.06724949562878

name: MTEB MassiveScenarioClassification (he)
config: he
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 32.178883658372555

name: MTEB MassiveScenarioClassification (hi)
config: hi
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 26.903160726294555

name: MTEB MassiveScenarioClassification (hu)
config: hu
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 40.379959650302624

name: MTEB MassiveScenarioClassification (hy)
config: hy
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 28.375924680564896

name: MTEB MassiveScenarioClassification (id)
config: id
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 44.361129791526565

name: MTEB MassiveScenarioClassification (is)
config: is
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 39.290517821116346

name: MTEB MassiveScenarioClassification (it)
config: it
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 46.4694014794889

name: MTEB MassiveScenarioClassification (ja)
config: ja
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 46.25756556825824

name: MTEB MassiveScenarioClassification (jv)
config: jv
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 41.12642905178212

name: MTEB MassiveScenarioClassification (ka)
config: ka
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 24.72763954270343

name: MTEB MassiveScenarioClassification (km)
config: km
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 29.741089441829182

name: MTEB MassiveScenarioClassification (kn)
config: kn
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 23.850033624747816

name: MTEB MassiveScenarioClassification (ko)
config: ko
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 36.56691324815064

name: MTEB MassiveScenarioClassification (lv)
config: lv
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 40.928043039677206

name: MTEB MassiveScenarioClassification (ml)
config: ml
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 25.527908540685946

name: MTEB MassiveScenarioClassification (mn)
config: mn
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 29.105581708137183

name: MTEB MassiveScenarioClassification (ms)
config: ms
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 43.78614660390047

name: MTEB MassiveScenarioClassification (my)
config: my
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 27.269670477471415

name: MTEB MassiveScenarioClassification (nb)
config: nb
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 39.018157363819775

name: MTEB MassiveScenarioClassification (nl)
config: nl
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 45.35978480161399

name: MTEB MassiveScenarioClassification (pl)
config: pl
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 41.89307330195023

name: MTEB MassiveScenarioClassification (pt)
config: pt
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 45.901143241425686

name: MTEB MassiveScenarioClassification (ro)
config: ro
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 44.11566913248151

name: MTEB MassiveScenarioClassification (ru)
config: ru
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 32.76395427034297

name: MTEB MassiveScenarioClassification (sl)
config: sl
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 40.504371217215876

name: MTEB MassiveScenarioClassification (sq)
config: sq
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 42.51849361129792

name: MTEB MassiveScenarioClassification (sv)
config: sv
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 42.293207800941495

name: MTEB MassiveScenarioClassification (sw)
config: sw
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 42.9993275050437

name: MTEB MassiveScenarioClassification (ta)
config: ta
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 28.32548755884331

name: MTEB MassiveScenarioClassification (te)
config: te
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 26.593813046402154

name: MTEB MassiveScenarioClassification (th)
config: th
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 36.788836583725626

name: MTEB MassiveScenarioClassification (tl)
config: tl
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 42.5689307330195

name: MTEB MassiveScenarioClassification (tr)
config: tr
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 37.09482178883658

name: MTEB MassiveScenarioClassification (ur)
config: ur
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 28.836583725622063

name: MTEB MassiveScenarioClassification (vi)
config: vi
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 37.357094821788834

name: MTEB MassiveScenarioClassification (zh-CN)
config: zh-CN
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 49.37794216543375

name: MTEB MassiveScenarioClassification (zh-TW)
config: zh-TW
split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
metrics:
- type: accuracy
value: 44.42165433759248

name: MTEB MedrxivClusteringP2P
config: default
split: test
+ revision: dcefc037ef84348e49b0d29109e891c01067226b
metrics:
- type: v_measure
value: 31.374938993074252

name: MTEB MedrxivClusteringS2S
config: default
split: test
+ revision: 3cd0e71dfbe09d4de0f9e5ecba43e7ce280959dc
metrics:
- type: v_measure
value: 26.871455379644093

name: MTEB MindSmallReranking
config: default
split: test
+ revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
metrics:
- type: map
value: 30.402396942935333

name: MTEB NFCorpus
config: default
split: test
+ revision: 7eb63cc0c1eb59324d709ebed25fcab851fa7610
metrics:
- type: map_at_1
value: 3.7740000000000005

name: MTEB NQ
config: default
split: test
+ revision: 6062aefc120bfe8ece5897809fb2e53bfe0d128c
metrics:
- type: map_at_1
value: 15.620999999999999

name: MTEB QuoraRetrieval
config: default
split: test
+ revision: 6205996560df11e3a3da9ab4f926788fc30a7db4
metrics:
- type: map_at_1
value: 54.717000000000006

name: MTEB RedditClustering
config: default
split: test
+ revision: b2805658ae38990172679479369a78b86de8c390
metrics:
- type: v_measure
value: 40.23390747226228

name: MTEB RedditClusteringP2P
config: default
split: test
+ revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
metrics:
- type: v_measure
value: 49.090518272935626

name: MTEB SCIDOCS
config: default
split: test
+ revision: 5c59ef3e437a0a9651c8fe6fde943e7dce59fba5
metrics:
- type: map_at_1
value: 3.028

name: MTEB SICK-R
config: default
split: test
+ revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
metrics:
- type: cos_sim_pearson
value: 76.62983928119752

name: MTEB STS12
config: default
split: test
+ revision: fdf84275bb8ce4b49c971d02e84dd1abc677a50f
metrics:
- type: cos_sim_pearson
value: 74.42679147085553

name: MTEB STS13
config: default
split: test
+ revision: 1591bfcbe8c69d4bf7fe2a16e2451017832cafb9
metrics:
- type: cos_sim_pearson
value: 75.62472426599543

name: MTEB STS14
config: default
split: test
+ revision: e2125984e7df8b7871f6ae9949cf6b6795e7c54b
metrics:
- type: cos_sim_pearson
value: 74.48227705407035

name: MTEB STS15
config: default
split: test
+ revision: 1cd7298cac12a96a373b6a2f18738bb3e739a9b6
metrics:
- type: cos_sim_pearson
value: 78.1566527175902

name: MTEB STS16
config: default
split: test
+ revision: 360a0b2dff98700d09e634a01e1cc1624d3e42cd
metrics:
- type: cos_sim_pearson
value: 75.068454465977

name: MTEB STS17 (ko-ko)
config: ko-ko
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 39.43327289939437

name: MTEB STS17 (ar-ar)
config: ar-ar
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 55.54431928210687

name: MTEB STS17 (en-ar)
config: en-ar
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 11.378463868809098

name: MTEB STS17 (en-de)
config: en-de
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 32.71403560929013

name: MTEB STS17 (en-en)
config: en-en
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 83.36340470799158

name: MTEB STS17 (en-tr)
config: en-tr
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 1.9200044163754912

name: MTEB STS17 (es-en)
config: es-en
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 26.561262451099577

name: MTEB STS17 (es-es)
config: es-es
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 69.7544202001433

name: MTEB STS17 (fr-en)
config: fr-en
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 27.70511842301491

name: MTEB STS17 (it-en)
config: it-en
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 24.226521799447692

name: MTEB STS17 (nl-en)
config: nl-en
split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
metrics:
- type: cos_sim_pearson
value: 29.131412364061234

name: MTEB STS22 (en)
config: en
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 64.04750650962879

name: MTEB STS22 (de)
config: de
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 19.26519187000913

name: MTEB STS22 (es)
config: es
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 34.221261828226936

name: MTEB STS22 (pl)
config: pl
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 3.620381732096531

name: MTEB STS22 (tr)
config: tr
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 16.69489628726267

name: MTEB STS22 (ar)
config: ar
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 9.134927430889528

name: MTEB STS22 (ru)
config: ru
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 3.6386482942352085

name: MTEB STS22 (zh)
config: zh
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 2.972091574908432

name: MTEB STS22 (fr)
config: fr
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 54.4745185734135

name: MTEB STS22 (de-en)
config: de-en
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 49.37865412588201

name: MTEB STS22 (es-en)
config: es-en
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 44.925652392562135

name: MTEB STS22 (it)
config: it
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 45.241690321111875

name: MTEB STS22 (pl-en)
config: pl-en
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 36.42138324083909

name: MTEB STS22 (zh-en)
config: zh-en
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 26.55350664089358

name: MTEB STS22 (es-it)
config: es-it
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 38.54682179114309

name: MTEB STS22 (de-fr)
config: de-fr
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 35.12956772546032

name: MTEB STS22 (de-pl)
config: de-pl
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 30.507667380509634

name: MTEB STS22 (fr-pl)
config: fr-pl
split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
metrics:
- type: cos_sim_pearson
value: 71.10820459712156

name: MTEB STSBenchmark
config: default
split: test
+ revision: 8913289635987208e6e7c72789e4be2fe94b6abd
metrics:
- type: cos_sim_pearson
value: 76.53032504460737

name: MTEB SciDocsRR
config: default
split: test
+ revision: 56a6d0140cf6356659e2a7c1413286a774468d44
metrics:
- type: map
value: 71.33941904192648

name: MTEB SciFact
config: default
split: test
+ revision: a75ae049398addde9b70f6b268875f5cbce99089
metrics:
- type: map_at_1
value: 43.333

name: MTEB SprintDuplicateQuestions
config: default
split: test
+ revision: 5a8256d0dff9c4bd3be3ba3e67e4e70173f802ea
metrics:
- type: cos_sim_accuracy
value: 99.7

name: MTEB StackExchangeClustering
config: default
split: test
+ revision: 70a89468f6dccacc6aa2b12a6eac54e74328f235
metrics:
- type: v_measure
value: 52.74481093815175

name: MTEB StackExchangeClusteringP2P
config: default
split: test
+ revision: d88009ab563dd0b16cfaf4436abaf97fa3550cf0
metrics:
- type: v_measure
value: 32.65999453562101

name: MTEB StackOverflowDupQuestions
config: default
split: test
+ revision: ef807ea29a75ec4f91b50fd4191cb4ee4589a9f9
metrics:
- type: map
value: 44.74498464555465

name: MTEB SummEval
config: default
split: test
+ revision: 8753c2788d36c01fc6f05d03fe3f7268d63f9122
metrics:
- type: cos_sim_pearson
value: 29.5961822471627

name: MTEB TRECCOVID
config: default
split: test
+ revision: 2c8041b2c07a79b6f7ba8fe6acc72e5d9f92d217
metrics:
- type: map_at_1
value: 0.241

name: MTEB Touche2020
config: default
split: test
+ revision: 527b7d77e16e343303e68cb6af11d6e18b9f7b3b
metrics:
- type: map_at_1
value: 2.782

name: MTEB ToxicConversationsClassification
config: default
split: test
+ revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
metrics:
- type: accuracy
value: 62.657999999999994

name: MTEB TweetSentimentExtractionClassification
config: default
split: test
+ revision: 62146448f05be9e52a36b8ee9936447ea787eede
metrics:
- type: accuracy
value: 52.40803621958121

name: MTEB TwentyNewsgroupsClustering
config: default
split: test
+ revision: 091a54f9a36281ce7d6590ec8c75dd485e7e01d4
metrics:
- type: v_measure
value: 32.12697126747911

name: MTEB TwitterSemEval2015
config: default
split: test
+ revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
metrics:
- type: cos_sim_accuracy
value: 80.69976753889253

name: MTEB TwitterURLCorpus
config: default
split: test
+ revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
metrics:
- type: cos_sim_accuracy
value: 86.90573213800597