{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 102.16,
"map_at_1": 0.03028,
"map_at_10": 0.06968,
"map_at_100": 0.082,
"map_at_1000": 0.08432,
"map_at_3": 0.05307,
"map_at_5": 0.06099,
"mrr_at_1": 0.148,
"mrr_at_10": 0.22425,
"mrr_at_100": 0.23577,
"mrr_at_1000": 0.2367,
"mrr_at_3": 0.20233,
"mrr_at_5": 0.21318,
"ndcg_at_1": 0.148,
"ndcg_at_10": 0.12206,
"ndcg_at_100": 0.17799,
"ndcg_at_1000": 0.22891,
"ndcg_at_3": 0.12128,
"ndcg_at_5": 0.10212,
"precision_at_1": 0.148,
"precision_at_10": 0.0617,
"precision_at_100": 0.01428,
"precision_at_1000": 0.00266,
"precision_at_3": 0.11333,
"precision_at_5": 0.0874,
"recall_at_1": 0.03028,
"recall_at_10": 0.12522,
"recall_at_100": 0.28975,
"recall_at_1000": 0.54038,
"recall_at_3": 0.06913,
"recall_at_5": 0.08883
}
}