{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 76.89,
    "map_at_1": 0.22404,
    "map_at_10": 0.36845,
    "map_at_100": 0.37945,
    "map_at_1000": 0.37966,
    "map_at_3": 0.3178,
    "map_at_5": 0.34608,
    "mrr_at_1": 0.22902,
    "mrr_at_10": 0.37034,
    "mrr_at_100": 0.38134,
    "mrr_at_1000": 0.38155,
    "mrr_at_3": 0.31935,
    "mrr_at_5": 0.34812,
    "ndcg_at_1": 0.22404,
    "ndcg_at_10": 0.45425,
    "ndcg_at_100": 0.50354,
    "ndcg_at_1000": 0.50874,
    "ndcg_at_3": 0.3497,
    "ndcg_at_5": 0.40081,
    "precision_at_1": 0.22404,
    "precision_at_10": 0.07304,
    "precision_at_100": 0.00951,
    "precision_at_1000": 0.00099,
    "precision_at_3": 0.14746,
    "precision_at_5": 0.11337,
    "recall_at_1": 0.22404,
    "recall_at_10": 0.73044,
    "recall_at_100": 0.95092,
    "recall_at_1000": 0.99075,
    "recall_at_3": 0.44239,
    "recall_at_5": 0.56686
  }
}