Roman Solomatin committed
Commit fc5049d • 1 parent: d3f8e19

add e5 instruct

Files changed:
- EXTERNAL_MODEL_RESULTS.json +697 -0
- model_meta.yaml +9 -0
EXTERNAL_MODEL_RESULTS.json
CHANGED
@@ -14202,6 +14202,703 @@
      ]
    }
  },
+  "multilingual-e5-large-instruct": {
+    "BitextMining": {
+      "f1": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "BornholmBitextMining": 55.42,
+          "Tatoeba (rus-eng)": 93.7,
+          "Tatoeba (spa-eng)": 98.53,
+          "Tatoeba (isl-eng)": 95.2,
+          "Tatoeba (jpn-eng)": 96.52,
+          "Tatoeba (bre-eng)": 32.56,
+          "Tatoeba (ast-eng)": 89.76,
+          "Tatoeba (pms-eng)": 78.43,
+          "Tatoeba (kab-eng)": 57.13,
+          "Tatoeba (kat-eng)": 92.64,
+          "Tatoeba (tur-eng)": 98.13,
+          "Tatoeba (swh-eng)": 80.12,
+          "Tatoeba (ukr-eng)": 94.8,
+          "Tatoeba (epo-eng)": 98.38,
+          "Tatoeba (lfn-eng)": 80.03,
+          "Tatoeba (sqi-eng)": 96.45,
+          "Tatoeba (cor-eng)": 11.54,
+          "Tatoeba (pam-eng)": 19.75,
+          "Tatoeba (gla-eng)": 77.87,
+          "Tatoeba (amh-eng)": 87.64,
+          "Tatoeba (jav-eng)": 85.29,
+          "Tatoeba (lvs-eng)": 93.02,
+          "Tatoeba (por-eng)": 94.91,
+          "Tatoeba (arz-eng)": 82.26,
+          "Tatoeba (aze-eng)": 93.89,
+          "Tatoeba (yid-eng)": 90.47,
+          "Tatoeba (gle-eng)": 84.37,
+          "Tatoeba (pol-eng)": 97.33,
+          "Tatoeba (ina-eng)": 96.1,
+          "Tatoeba (ell-eng)": 94.85,
+          "Tatoeba (fry-eng)": 80.83,
+          "Tatoeba (kor-eng)": 91.77,
+          "Tatoeba (ita-eng)": 94.37,
+          "Tatoeba (ces-eng)": 96.23,
+          "Tatoeba (fra-eng)": 94.97,
+          "Tatoeba (ceb-eng)": 71.09,
+          "Tatoeba (mal-eng)": 98.93,
+          "Tatoeba (ido-eng)": 91.78,
+          "Tatoeba (dtp-eng)": 14.1,
+          "Tatoeba (kaz-eng)": 87.42,
+          "Tatoeba (zsm-eng)": 96.62,
+          "Tatoeba (dan-eng)": 95.87,
+          "Tatoeba (nov-eng)": 83.42,
+          "Tatoeba (tel-eng)": 95.51,
+          "Tatoeba (arq-eng)": 64.1,
+          "Tatoeba (dsb-eng)": 72.99,
+          "Tatoeba (xho-eng)": 86.74,
+          "Tatoeba (glg-eng)": 95.96,
+          "Tatoeba (uig-eng)": 87.01,
+          "Tatoeba (ang-eng)": 72.74,
+          "Tatoeba (tzl-eng)": 59.17,
+          "Tatoeba (deu-eng)": 99.33,
+          "Tatoeba (ile-eng)": 89.94,
+          "Tatoeba (lat-eng)": 74.17,
+          "Tatoeba (eus-eng)": 84.67,
+          "Tatoeba (bul-eng)": 94.52,
+          "Tatoeba (hun-eng)": 94.52,
+          "Tatoeba (mkd-eng)": 92.17,
+          "Tatoeba (nds-eng)": 86.3,
+          "Tatoeba (swg-eng)": 77.68,
+          "Tatoeba (fao-eng)": 87.43,
+          "Tatoeba (war-eng)": 73.83,
+          "Tatoeba (csb-eng)": 68.19,
+          "Tatoeba (afr-eng)": 95.67,
+          "Tatoeba (bos-eng)": 95.86,
+          "Tatoeba (cat-eng)": 95.53,
+          "Tatoeba (hrv-eng)": 96.75,
+          "Tatoeba (mhr-eng)": 16.98,
+          "Tatoeba (tha-eng)": 96.59,
+          "Tatoeba (tat-eng)": 85.7,
+          "Tatoeba (oci-eng)": 72.35,
+          "Tatoeba (urd-eng)": 93.22,
+          "Tatoeba (kur-eng)": 77.3,
+          "Tatoeba (lit-eng)": 91.71,
+          "Tatoeba (cha-eng)": 50.55,
+          "Tatoeba (ron-eng)": 97.32,
+          "Tatoeba (pes-eng)": 94.92,
+          "Tatoeba (cmn-eng)": 96.5,
+          "Tatoeba (slv-eng)": 91.6,
+          "Tatoeba (heb-eng)": 91.52,
+          "Tatoeba (bel-eng)": 95.48,
+          "Tatoeba (slk-eng)": 95.27,
+          "Tatoeba (cbk-eng)": 83.24,
+          "Tatoeba (yue-eng)": 92.84,
+          "Tatoeba (hin-eng)": 97.47,
+          "Tatoeba (nob-eng)": 98.33,
+          "Tatoeba (gsw-eng)": 59.47,
+          "Tatoeba (tgl-eng)": 96.0,
+          "Tatoeba (nno-eng)": 94.06,
+          "Tatoeba (est-eng)": 87.46,
+          "Tatoeba (vie-eng)": 97.5,
+          "Tatoeba (ber-eng)": 54.69,
+          "Tatoeba (wuu-eng)": 92.4,
+          "Tatoeba (tuk-eng)": 50.55,
+          "Tatoeba (hye-eng)": 93.65,
+          "Tatoeba (ara-eng)": 91.38,
+          "Tatoeba (mon-eng)": 95.8,
+          "Tatoeba (awa-eng)": 91.41,
+          "Tatoeba (khm-eng)": 75.24,
+          "Tatoeba (swe-eng)": 95.35,
+          "Tatoeba (ind-eng)": 94.32,
+          "Tatoeba (max-eng)": 74.56,
+          "Tatoeba (hsb-eng)": 79.87,
+          "Tatoeba (ben-eng)": 89.03,
+          "Tatoeba (kzj-eng)": 14.96,
+          "Tatoeba (uzb-eng)": 80.49,
+          "Tatoeba (tam-eng)": 90.88,
+          "Tatoeba (mar-eng)": 92.33,
+          "Tatoeba (cym-eng)": 89.86,
+          "Tatoeba (srp-eng)": 94.55,
+          "Tatoeba (fin-eng)": 96.8,
+          "Tatoeba (nld-eng)": 97.53,
+          "Tatoeba (orv-eng)": 58.83
+        }
+      ]
+    },
+    "Classification": {
+      "accuracy": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "AllegroReviews": 52.43,
+          "AmazonCounterfactualClassification (en-ext)": 66.42,
+          "AmazonCounterfactualClassification (en)": 69.72,
+          "AmazonCounterfactualClassification (de)": 65.61,
+          "AmazonCounterfactualClassification (ja)": 70.47,
+          "AmazonPolarityClassification": 96.24,
+          "AmazonReviewsClassification (en)": 56.2,
+          "AmazonReviewsClassification (de)": 54.54,
+          "AmazonReviewsClassification (es)": 49.88,
+          "AmazonReviewsClassification (fr)": 49.78,
+          "AmazonReviewsClassification (ja)": 49.36,
+          "AmazonReviewsClassification (zh)": 45.1,
+          "AngryTweetsClassification": 59.53,
+          "Banking77Classification": 78.04,
+          "CBD": 74.21,
+          "DanishPoliticalCommentsClassification": 33.07,
+          "EmotionClassification": 54.99,
+          "GeoreviewClassification": 55.9,
+          "HeadlineClassification": 86.18,
+          "IFlyTek": 44.06,
+          "ImdbClassification": 94.61,
+          "InappropriatenessClassification": 65.53,
+          "JDReview": 80.23,
+          "KinopoiskClassification": 66.12,
+          "LccSentimentClassification": 60.27,
+          "MTOPDomainClassification (en)": 91.18,
+          "MTOPDomainClassification (de)": 90.03,
+          "MTOPDomainClassification (es)": 89.1,
+          "MTOPDomainClassification (fr)": 85.89,
+          "MTOPDomainClassification (hi)": 86.29,
+          "MTOPDomainClassification (th)": 83.35,
+          "MTOPIntentClassification (en)": 68.05,
+          "MTOPIntentClassification (de)": 68.55,
+          "MTOPIntentClassification (es)": 69.86,
+          "MTOPIntentClassification (fr)": 63.26,
+          "MTOPIntentClassification (hi)": 63.38,
+          "MTOPIntentClassification (th)": 64.8,
+          "MasakhaNEWSClassification (amh)": 88.48,
+          "MasakhaNEWSClassification (eng)": 82.12,
+          "MasakhaNEWSClassification (fra)": 78.93,
+          "MasakhaNEWSClassification (hau)": 80.94,
+          "MasakhaNEWSClassification (ibo)": 77.28,
+          "MasakhaNEWSClassification (lin)": 75.94,
+          "MasakhaNEWSClassification (lug)": 70.36,
+          "MasakhaNEWSClassification (orm)": 80.28,
+          "MasakhaNEWSClassification (pcm)": 93.74,
+          "MasakhaNEWSClassification (run)": 83.88,
+          "MasakhaNEWSClassification (sna)": 87.91,
+          "MasakhaNEWSClassification (som)": 68.61,
+          "MasakhaNEWSClassification (swa)": 79.47,
+          "MasakhaNEWSClassification (tir)": 72.87,
+          "MasakhaNEWSClassification (xho)": 84.95,
+          "MasakhaNEWSClassification (yor)": 82.99,
+          "MassiveIntentClassification (sl)": 63.79,
+          "MassiveIntentClassification (cy)": 56.22,
+          "MassiveIntentClassification (sq)": 62.12,
+          "MassiveIntentClassification (ar)": 55.87,
+          "MassiveIntentClassification (vi)": 64.49,
+          "MassiveIntentClassification (fr)": 66.88,
+          "MassiveIntentClassification (sw)": 54.14,
+          "MassiveIntentClassification (es)": 65.56,
+          "MassiveIntentClassification (az)": 61.96,
+          "MassiveIntentClassification (hy)": 61.73,
+          "MassiveIntentClassification (ru)": 67.6,
+          "MassiveIntentClassification (tr)": 66.3,
+          "MassiveIntentClassification (ta)": 60.4,
+          "MassiveIntentClassification (lv)": 61.49,
+          "MassiveIntentClassification (jv)": 55.6,
+          "MassiveIntentClassification (fi)": 66.02,
+          "MassiveIntentClassification (fa)": 68.48,
+          "MassiveIntentClassification (ka)": 50.74,
+          "MassiveIntentClassification (ro)": 63.73,
+          "MassiveIntentClassification (sv)": 68.76,
+          "MassiveIntentClassification (ms)": 63.88,
+          "MassiveIntentClassification (kn)": 58.98,
+          "MassiveIntentClassification (ml)": 63.22,
+          "MassiveIntentClassification (pt)": 67.28,
+          "MassiveIntentClassification (my)": 57.06,
+          "MassiveIntentClassification (tl)": 60.99,
+          "MassiveIntentClassification (af)": 60.95,
+          "MassiveIntentClassification (he)": 63.5,
+          "MassiveIntentClassification (ja)": 68.29,
+          "MassiveIntentClassification (zh-CN)": 67.6,
+          "MassiveIntentClassification (km)": 46.88,
+          "MassiveIntentClassification (mn)": 58.85,
+          "MassiveIntentClassification (am)": 54.28,
+          "MassiveIntentClassification (de)": 65.91,
+          "MassiveIntentClassification (el)": 65.25,
+          "MassiveIntentClassification (hu)": 65.54,
+          "MassiveIntentClassification (en)": 70.89,
+          "MassiveIntentClassification (id)": 65.45,
+          "MassiveIntentClassification (ur)": 60.74,
+          "MassiveIntentClassification (nl)": 68.18,
+          "MassiveIntentClassification (da)": 65.08,
+          "MassiveIntentClassification (th)": 62.58,
+          "MassiveIntentClassification (pl)": 67.45,
+          "MassiveIntentClassification (zh-TW)": 62.09,
+          "MassiveIntentClassification (bn)": 61.73,
+          "MassiveIntentClassification (is)": 57.29,
+          "MassiveIntentClassification (te)": 61.38,
+          "MassiveIntentClassification (ko)": 64.16,
+          "MassiveIntentClassification (nb)": 65.85,
+          "MassiveIntentClassification (it)": 66.82,
+          "MassiveIntentClassification (hi)": 65.27,
+          "MassiveScenarioClassification (ar)": 63.86,
+          "MassiveScenarioClassification (id)": 70.52,
+          "MassiveScenarioClassification (sl)": 68.6,
+          "MassiveScenarioClassification (sq)": 69.09,
+          "MassiveScenarioClassification (km)": 54.17,
+          "MassiveScenarioClassification (fr)": 71.16,
+          "MassiveScenarioClassification (bn)": 67.55,
+          "MassiveScenarioClassification (am)": 61.43,
+          "MassiveScenarioClassification (nl)": 73.21,
+          "MassiveScenarioClassification (af)": 68.3,
+          "MassiveScenarioClassification (fi)": 68.99,
+          "MassiveScenarioClassification (ja)": 73.43,
+          "MassiveScenarioClassification (en)": 73.88,
+          "MassiveScenarioClassification (pt)": 69.76,
+          "MassiveScenarioClassification (ka)": 59.88,
+          "MassiveScenarioClassification (da)": 71.88,
+          "MassiveScenarioClassification (jv)": 63.3,
+          "MassiveScenarioClassification (zh-TW)": 68.04,
+          "MassiveScenarioClassification (cy)": 62.98,
+          "MassiveScenarioClassification (hu)": 71.72,
+          "MassiveScenarioClassification (lv)": 67.54,
+          "MassiveScenarioClassification (pl)": 71.44,
+          "MassiveScenarioClassification (de)": 72.68,
+          "MassiveScenarioClassification (ms)": 69.07,
+          "MassiveScenarioClassification (ur)": 67.11,
+          "MassiveScenarioClassification (kn)": 66.84,
+          "MassiveScenarioClassification (ta)": 64.96,
+          "MassiveScenarioClassification (is)": 67.14,
+          "MassiveScenarioClassification (it)": 70.44,
+          "MassiveScenarioClassification (sv)": 73.38,
+          "MassiveScenarioClassification (te)": 67.53,
+          "MassiveScenarioClassification (th)": 69.54,
+          "MassiveScenarioClassification (fa)": 72.17,
+          "MassiveScenarioClassification (mn)": 63.21,
+          "MassiveScenarioClassification (ro)": 68.72,
+          "MassiveScenarioClassification (ml)": 69.13,
+          "MassiveScenarioClassification (az)": 66.18,
+          "MassiveScenarioClassification (hi)": 69.83,
+          "MassiveScenarioClassification (el)": 71.1,
+          "MassiveScenarioClassification (nb)": 71.28,
+          "MassiveScenarioClassification (tl)": 67.64,
+          "MassiveScenarioClassification (he)": 67.63,
+          "MassiveScenarioClassification (vi)": 69.17,
+          "MassiveScenarioClassification (es)": 70.0,
+          "MassiveScenarioClassification (ko)": 70.49,
+          "MassiveScenarioClassification (tr)": 69.63,
+          "MassiveScenarioClassification (ru)": 71.59,
+          "MassiveScenarioClassification (zh-CN)": 72.41,
+          "MassiveScenarioClassification (my)": 63.12,
+          "MassiveScenarioClassification (sw)": 62.96,
+          "MassiveScenarioClassification (hy)": 65.85,
+          "MultilingualSentiment": 71.66,
+          "NoRecClassification": 57.06,
+          "NordicLangClassification": 76.57,
+          "OnlineShopping": 91.83,
+          "PAC": 65.68,
+          "PolEmo2.0-IN": 80.96,
+          "PolEmo2.0-OUT": 56.96,
+          "RuReviewsClassification": 68.56,
+          "RuSciBenchGRNTIClassification": 65.07,
+          "RuSciBenchOECDClassification": 50.21,
+          "TNews": 49.85,
+          "ToxicConversationsClassification": 66.82,
+          "TweetSentimentExtractionClassification": 59.18,
+          "Waimai": 86.83
+        }
+      ]
+    },
+    "Clustering": {
+      "v_measure": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "GeoreviewClusteringP2P": 74.34,
+          "MLSUMClusteringP2P (ru)": 57.77,
+          "MLSUMClusteringS2S (ru)": 57.5,
+          "MasakhaNEWSClusteringP2P (amh)": 74.82,
+          "MasakhaNEWSClusteringP2P (eng)": 70.12,
+          "MasakhaNEWSClusteringP2P (fra)": 70.48,
+          "MasakhaNEWSClusteringP2P (hau)": 71.22,
+          "MasakhaNEWSClusteringP2P (ibo)": 56.64,
+          "MasakhaNEWSClusteringP2P (lin)": 72.94,
+          "MasakhaNEWSClusteringP2P (lug)": 65.94,
+          "MasakhaNEWSClusteringP2P (orm)": 45.85,
+          "MasakhaNEWSClusteringP2P (pcm)": 88.57,
+          "MasakhaNEWSClusteringP2P (run)": 64.64,
+          "MasakhaNEWSClusteringP2P (sna)": 81.13,
+          "MasakhaNEWSClusteringP2P (som)": 54.59,
+          "MasakhaNEWSClusteringP2P (swa)": 40.55,
+          "MasakhaNEWSClusteringP2P (tir)": 62.76,
+          "MasakhaNEWSClusteringP2P (xho)": 57.61,
+          "MasakhaNEWSClusteringP2P (yor)": 64.78,
+          "MasakhaNEWSClusteringS2S (amh)": 67.38,
+          "MasakhaNEWSClusteringS2S (eng)": 61.2,
+          "MasakhaNEWSClusteringS2S (fra)": 69.48,
+          "MasakhaNEWSClusteringS2S (hau)": 52.67,
+          "MasakhaNEWSClusteringS2S (ibo)": 78.68,
+          "MasakhaNEWSClusteringS2S (lin)": 80.26,
+          "MasakhaNEWSClusteringS2S (lug)": 49.73,
+          "MasakhaNEWSClusteringS2S (orm)": 44.39,
+          "MasakhaNEWSClusteringS2S (pcm)": 74.0,
+          "MasakhaNEWSClusteringS2S (run)": 64.34,
+          "MasakhaNEWSClusteringS2S (sna)": 67.1,
+          "MasakhaNEWSClusteringS2S (som)": 44.64,
+          "MasakhaNEWSClusteringS2S (swa)": 42.09,
+          "MasakhaNEWSClusteringS2S (tir)": 54.35,
+          "MasakhaNEWSClusteringS2S (xho)": 43.19,
+          "MasakhaNEWSClusteringS2S (yor)": 54.23,
+          "RuSciBenchGRNTIClusteringP2P": 62.21,
+          "RuSciBenchOECDClusteringP2P": 53.09,
+          "TwentyNewsgroupsClustering": 51.03
+        }
+      ]
+    },
+    "PairClassification": {
+      "max_ap": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "CDSC-E": 76.17,
+          "OpusparcusPC (de)": 97.56,
+          "OpusparcusPC (en)": 98.91,
+          "OpusparcusPC (fi)": 94.74,
+          "OpusparcusPC (fr)": 94.73,
+          "OpusparcusPC (ru)": 91.1,
+          "OpusparcusPC (sv)": 95.78,
+          "PSC": 99.31,
+          "PawsXPairClassification (de)": 56.46,
+          "PawsXPairClassification (en)": 64.4,
+          "PawsXPairClassification (es)": 57.27,
+          "PawsXPairClassification (fr)": 59.73,
+          "PawsXPairClassification (ja)": 50.82,
+          "PawsXPairClassification (ko)": 51.63,
+          "PawsXPairClassification (zh)": 57.34,
+          "SICK-E-PL": 79.69,
+          "SprintDuplicateQuestions": 92.19,
+          "TERRa": 63.77,
+          "TwitterSemEval2015": 79.79,
+          "TwitterURLCorpus": 86.73
+        },
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "CDSC-E": 76.17,
+          "OpusparcusPC (de)": 97.56,
+          "OpusparcusPC (en)": 98.92,
+          "OpusparcusPC (fi)": 94.74,
+          "OpusparcusPC (fr)": 94.73,
+          "OpusparcusPC (ru)": 91.1,
+          "OpusparcusPC (sv)": 95.78,
+          "PSC": 99.31,
+          "PawsXPairClassification (de)": 56.64,
+          "PawsXPairClassification (en)": 64.43,
+          "PawsXPairClassification (es)": 57.28,
+          "PawsXPairClassification (fr)": 59.76,
+          "PawsXPairClassification (ja)": 50.82,
+          "PawsXPairClassification (ko)": 51.74,
+          "PawsXPairClassification (zh)": 57.75,
+          "SICK-E-PL": 79.69,
+          "SprintDuplicateQuestions": 92.19,
+          "TERRa": 63.89,
+          "TwitterSemEval2015": 79.79,
+          "TwitterURLCorpus": 86.73
+        }
+      ]
+    },
+    "Reranking": {
+      "map": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "AlloprofReranking": 74.68,
+          "AskUbuntuDupQuestions": 64.41,
+          "MMarcoReranking": 23.6,
+          "MindSmallReranking": 33.07,
+          "RuBQReranking": 75.84,
+          "SciDocsRR": 85.75,
+          "StackOverflowDupQuestions": 52.45,
+          "SyntecReranking": 89.95,
+          "T2Reranking": 67.12
+        },
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "MIRACLReranking (ru)": 62.49
+        }
+      ]
+    },
+    "Retrieval": {
+      "ndcg_at_10": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "AILACasedocs": 33.33,
+          "AILAStatutes": 29.66,
+          "ARCChallenge": 15.03,
+          "AlloprofRetrieval": 52.12,
+          "AlphaNLI": 24.87,
+          "AppsRetrieval": 34.89,
+          "ArguAna": 58.48,
+          "ArguAna-PL": 44.6,
+          "BSARDRetrieval": 24.61,
+          "CmedqaRetrieval": 34.15,
+          "CodeFeedbackMT": 39.87,
+          "CodeFeedbackST": 75.89,
+          "CodeSearchNetCCRetrieval (python)": 86.24,
+          "CodeSearchNetCCRetrieval (javascript)": 79.2,
+          "CodeSearchNetCCRetrieval (go)": 72.31,
+          "CodeSearchNetCCRetrieval (ruby)": 81.9,
+          "CodeSearchNetCCRetrieval (java)": 80.07,
+          "CodeSearchNetCCRetrieval (php)": 72.49,
+          "CodeSearchNetRetrieval (python)": 88.96,
+          "CodeSearchNetRetrieval (javascript)": 75.37,
+          "CodeSearchNetRetrieval (go)": 90.41,
+          "CodeSearchNetRetrieval (ruby)": 79.56,
+          "CodeSearchNetRetrieval (java)": 80.55,
+          "CodeSearchNetRetrieval (php)": 83.7,
+          "CodeTransOceanContest": 83.36,
+          "CodeTransOceanDL": 29.01,
+          "CosQA": 37.75,
+          "CovidRetrieval": 75.8,
+          "DuRetrieval": 85.16,
+          "EcomRetrieval": 53.92,
+          "FiQA-PL": 32.01,
+          "FiQA2018": 48.42,
+          "GerDaLIRSmall": 21.34,
+          "HellaSwag": 32.02,
+          "LEMBNarrativeQARetrieval": 26.71,
+          "LEMBQMSumRetrieval": 26.08,
+          "LEMBSummScreenFDRetrieval": 72.75,
+          "LEMBWikimQARetrieval": 57.79,
+          "LeCaRDv2": 64.22,
+          "LegalBenchConsumerContractsQA": 76.8,
+          "LegalBenchCorporateLobbying": 94.25,
+          "LegalQuAD": 51.25,
+          "LegalSummarization": 68.07,
+          "MIRACLRetrieval (ru)": 66.08,
+          "MMarcoRetrieval": 78.81,
+          "MedicalRetrieval": 56.55,
+          "MintakaRetrieval (ar)": 26.13,
+          "MintakaRetrieval (de)": 35.13,
+          "MintakaRetrieval (es)": 34.51,
+          "MintakaRetrieval (fr)": 33.49,
+          "MintakaRetrieval (hi)": 28.19,
+          "MintakaRetrieval (it)": 35.01,
+          "MintakaRetrieval (ja)": 26.31,
+          "MintakaRetrieval (pt)": 35.11,
+          "NFCorpus": 36.34,
+          "NFCorpus-PL": 32.66,
+          "PIQA": 32.26,
+          "Quail": 8.63,
+          "RARbCode": 71.22,
+          "RARbMath": 71.95,
+          "RiaNewsRetrieval": 83.26,
+          "RuBQRetrieval": 73.9,
+          "SCIDOCS": 19.24,
+          "SCIDOCS-PL": 17.15,
+          "SIQA": 7.33,
+          "SciFact": 71.62,
+          "SciFact-PL": 63.31,
+          "SpartQA": 13.54,
+          "StackOverflowQA": 85.82,
+          "SyntecRetrieval": 87.8,
+          "SyntheticText2SQL": 58.85,
+          "T2Retrieval": 82.92,
+          "TRECCOVID": 82.51,
+          "TRECCOVID-PL": 62.03,
+          "TempReasonL1": 1.2,
+          "TempReasonL2Fact": 40.19,
+          "TempReasonL2Pure": 3.64,
+          "TempReasonL3Fact": 37.37,
+          "TempReasonL3Pure": 9.82,
+          "Touche2020": 27.4,
+          "VideoRetrieval": 52.24,
+          "WinoGrande": 54.27,
+          "XPQARetrieval (ara-ara)": 48.56,
+          "XPQARetrieval (eng-ara)": 34.01,
+          "XPQARetrieval (ara-eng)": 45.13,
+          "XPQARetrieval (deu-deu)": 79.18,
+          "XPQARetrieval (eng-deu)": 50.55,
+          "XPQARetrieval (deu-eng)": 75.23,
+          "XPQARetrieval (spa-spa)": 65.07,
+          "XPQARetrieval (eng-spa)": 41.75,
+          "XPQARetrieval (spa-eng)": 61.02,
+          "XPQARetrieval (fra-fra)": 72.72,
+          "XPQARetrieval (eng-fra)": 46.17,
+          "XPQARetrieval (fra-eng)": 69.33,
+          "XPQARetrieval (hin-hin)": 76.62,
+          "XPQARetrieval (eng-hin)": 18.12,
+          "XPQARetrieval (hin-eng)": 71.95,
+          "XPQARetrieval (ita-ita)": 77.03,
+          "XPQARetrieval (eng-ita)": 41.42,
+          "XPQARetrieval (ita-eng)": 70.6,
+          "XPQARetrieval (jpn-jpn)": 74.36,
+          "XPQARetrieval (eng-jpn)": 41.97,
+          "XPQARetrieval (jpn-eng)": 70.5,
+          "XPQARetrieval (kor-kor)": 39.74,
+          "XPQARetrieval (eng-kor)": 34.9,
+          "XPQARetrieval (kor-eng)": 37.39,
+          "XPQARetrieval (pol-pol)": 52.53,
+          "XPQARetrieval (eng-pol)": 34.22,
+          "XPQARetrieval (pol-eng)": 48.53,
+          "XPQARetrieval (por-por)": 51.17,
+          "XPQARetrieval (eng-por)": 31.76,
+          "XPQARetrieval (por-eng)": 48.79,
+          "XPQARetrieval (tam-tam)": 49.69,
+          "XPQARetrieval (eng-tam)": 6.8,
+          "XPQARetrieval (tam-eng)": 40.83,
+          "XPQARetrieval (cmn-cmn)": 66.42,
+          "XPQARetrieval (eng-cmn)": 33.94,
+          "XPQARetrieval (cmn-eng)": 59.78
+        }
+      ]
+    },
+    "STS": {
+      "cosine_spearman": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "AFQMC": 37.53,
+          "ATEC": 43.27,
+          "BIOSSES": 87.46,
+          "BQ": 48.8,
+          "CDSC-R": 92.35,
+          "LCQMC": 76.06,
+          "PAWSX": 15.06,
+          "RUParaPhraserSTS": 75.4,
+          "RuSTSBenchmarkSTS": 83.97,
+          "SICK-R": 81.73,
+          "SICK-R-PL": 77.62,
+          "SICKFr": 80.2,
+          "STS12": 82.53,
+          "STS13": 88.05,
+          "STS14": 84.83,
+          "STS15": 91.02,
+          "STS16": 87.32,
+          "STS17 (ar-ar)": 82.71,
+          "STS17 (nl-en)": 87.44,
+          "STS17 (fr-en)": 86.28,
+          "STS17 (ko-ko)": 84.31,
+          "STS17 (es-es)": 89.4,
+          "STS17 (en-ar)": 81.83,
+          "STS17 (en-de)": 87.61,
+          "STS17 (en-tr)": 80.47,
+          "STS17 (es-en)": 87.03,
+          "STS17 (en-en)": 90.33,
+          "STS17 (it-en)": 88.25,
+          "STS22 (es-en)": 78.65,
+          "STS22 (de)": 61.53,
+          "STS22 (zh)": 67.4,
+          "STS22 (es)": 68.45,
+          "STS22 (de-fr)": 65.52,
+          "STS22 (pl)": 40.97,
+          "STS22 (ru)": 65.17,
+          "STS22 (en)": 68.67,
+          "STS22 (fr)": 82.25,
+          "STS22 (es-it)": 75.25,
+          "STS22 (pl-en)": 74.86,
+          "STS22 (fr-pl)": 61.98,
+          "STS22 (ar)": 59.52,
+          "STS22 (it)": 79.45,
+          "STS22 (tr)": 67.2,
+          "STS22 (zh-en)": 71.45,
+          "STS22 (de-pl)": 54.6,
+          "STS22 (de-en)": 58.22,
+          "STSB": 81.68,
+          "STSBenchmark": 88.39,
+          "STSBenchmarkMultilingualSTS (de)": 85.38,
+          "STSBenchmarkMultilingualSTS (nl)": 84.29,
+          "STSBenchmarkMultilingualSTS (pl)": 83.42,
+          "STSBenchmarkMultilingualSTS (fr)": 84.94,
+          "STSBenchmarkMultilingualSTS (pt)": 84.9,
+          "STSBenchmarkMultilingualSTS (it)": 84.38,
+          "STSBenchmarkMultilingualSTS (es)": 86.15,
+          "STSBenchmarkMultilingualSTS (zh)": 82.54,
+          "STSBenchmarkMultilingualSTS (en)": 88.39,
+          "STSBenchmarkMultilingualSTS (ru)": 83.86
+        },
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "AFQMC": 37.53,
+          "ATEC": 43.27,
+          "BIOSSES": 87.46,
+          "BQ": 48.8,
+          "CDSC-R": 92.35,
+          "LCQMC": 76.06,
+          "PAWSX": 15.06,
+          "RUParaPhraserSTS": 75.4,
+          "RuSTSBenchmarkSTS": 83.97,
+          "SICK-R": 81.73,
+          "SICK-R-PL": 77.62,
+          "SICKFr": 80.2,
+          "STS12": 82.53,
+          "STS13": 88.05,
+          "STS14": 84.83,
+          "STS15": 91.02,
+          "STS16": 87.32,
+          "STS17 (ar-ar)": 82.71,
+          "STS17 (nl-en)": 87.44,
+          "STS17 (fr-en)": 86.28,
+          "STS17 (ko-ko)": 84.31,
+          "STS17 (es-es)": 89.4,
+          "STS17 (en-ar)": 81.83,
+          "STS17 (en-de)": 87.61,
+          "STS17 (en-tr)": 80.47,
+          "STS17 (es-en)": 87.03,
+          "STS17 (en-en)": 90.33,
+          "STS17 (it-en)": 88.25,
+          "STS22 (es-en)": 78.65,
+          "STS22 (de)": 61.53,
+          "STS22 (zh)": 67.4,
+          "STS22 (es)": 68.45,
+          "STS22 (de-fr)": 65.52,
+          "STS22 (pl)": 40.97,
+          "STS22 (ru)": 65.17,
+          "STS22 (en)": 68.67,
+          "STS22 (fr)": 82.25,
+          "STS22 (es-it)": 75.25,
+          "STS22 (pl-en)": 74.86,
+          "STS22 (fr-pl)": 61.98,
+          "STS22 (ar)": 59.52,
+          "STS22 (it)": 79.45,
+          "STS22 (tr)": 67.2,
+          "STS22 (zh-en)": 71.45,
+          "STS22 (de-pl)": 54.6,
+          "STS22 (de-en)": 58.22,
+          "STSB": 81.68,
+          "STSBenchmark": 88.39,
+          "STSBenchmarkMultilingualSTS (de)": 85.38,
+          "STSBenchmarkMultilingualSTS (nl)": 84.29,
+          "STSBenchmarkMultilingualSTS (pl)": 83.42,
+          "STSBenchmarkMultilingualSTS (fr)": 84.94,
+          "STSBenchmarkMultilingualSTS (pt)": 84.9,
+          "STSBenchmarkMultilingualSTS (it)": 84.38,
+          "STSBenchmarkMultilingualSTS (es)": 86.15,
+          "STSBenchmarkMultilingualSTS (zh)": 82.54,
+          "STSBenchmarkMultilingualSTS (en)": 88.39,
+          "STSBenchmarkMultilingualSTS (ru)": 83.86
+        }
+      ]
+    },
+    "Summarization": {
+      "cosine_spearman": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "SummEval": 30.46,
+          "SummEvalFr": 31.72
+        },
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "SummEval": 30.46,
+          "SummEvalFr": 31.72
+        }
+      ]
+    },
+    "MultilabelClassification": {
+      "accuracy": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "CEDRClassification": 50.01,
+          "SensitiveTopicsClassification": 32.29
+        }
+      ]
+    },
+    "InstructionRetrieval": {
+      "p-MRR": [
+        {
+          "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large-instruct\">multilingual-e5-large-instruct</a>",
+          "Core17InstructionRetrieval": 1.82,
+          "News21InstructionRetrieval": 1.5,
+          "Robust04InstructionRetrieval": -4.52
+        }
+      ]
+    }
+  },
  "multilingual-e5-small": {
    "BitextMining": {
      "f1": [
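Note on the structure added above: EXTERNAL_MODEL_RESULTS.json is keyed by model name, then task type ("BitextMining", "Classification", ...), then metric ("f1", "accuracy", ...), each holding a list of score dictionaries keyed by dataset plus a rendered "Model" link. A minimal sketch of reading one of the newly added scores back out, assuming that layout; the helper name and relative path are illustrative, not part of the leaderboard code:

import json

def external_score(path, model, task, metric, dataset):
    # Load the external-results file and walk model -> task -> metric ->
    # [score dicts], matching the layout shown in the hunk above.
    with open(path, encoding="utf-8") as f:
        results = json.load(f)
    for entry in results[model][task][metric]:
        if dataset in entry:
            return entry[dataset]
    return None

print(external_score("EXTERNAL_MODEL_RESULTS.json",
                     "multilingual-e5-large-instruct",
                     "STS", "cosine_spearman", "STS17 (en-en)"))  # 90.33 per this commit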
model_meta.yaml
CHANGED
@@ -1072,6 +1072,15 @@ model_meta:
    is_proprietary: false
    is_sentence_transformers_compatible: true
    uses_instruct: false
+  multilingual-e5-large-instruct:
+    link: https://huggingface.co/intfloat/multilingual-e5-large-instruct
+    seq_len: 514
+    size: 560
+    dim: 1024
+    is_external: true
+    is_proprietary: false
+    is_sentence_transformers_compatible: true
+    uses_instruct: true
  multilingual-e5-small:
    link: https://huggingface.co/intfloat/multilingual-e5-small
    seq_len: 512
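The metadata above marks the model as sentence-transformers compatible with uses_instruct: true (1024-dimensional embeddings, 514-token sequence length). A usage sketch under those assumptions; the "Instruct: ... Query: ..." template follows the intfloat/multilingual-e5-large-instruct model card, and the task description below is only an example:

from sentence_transformers import SentenceTransformer

model = SentenceTransformer("intfloat/multilingual-e5-large-instruct")

# Queries get an instruction prefix; documents are encoded as-is (per the model card).
task = "Given a web search query, retrieve relevant passages that answer the query"
queries = [f"Instruct: {task}\nQuery: how much protein should a female eat"]
documents = [
    "As a general guideline, the CDC's average protein requirement for women ages 19 to 70 is 46 grams per day.",
    "The summit of a mountain is its highest point.",
]

q_emb = model.encode(queries, normalize_embeddings=True)
d_emb = model.encode(documents, normalize_embeddings=True)
print(q_emb @ d_emb.T)  # cosine similarities; the first document should score higher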