{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 3659.36,
    "map_at_1": 0.25544,
    "map_at_10": 0.3262,
    "map_at_100": 0.33275,
    "map_at_1000": 0.33344,
    "map_at_3": 0.30851,
    "map_at_5": 0.31869,
    "mrr_at_1": 0.51087,
    "mrr_at_10": 0.57704,
    "mrr_at_100": 0.58175,
    "mrr_at_1000": 0.58207,
    "mrr_at_3": 0.56106,
    "mrr_at_5": 0.57074,
    "ndcg_at_1": 0.51087,
    "ndcg_at_10": 0.40876,
    "ndcg_at_100": 0.43762,
    "ndcg_at_1000": 0.45423,
    "ndcg_at_3": 0.3765,
    "ndcg_at_5": 0.39305,
    "precision_at_1": 0.51087,
    "precision_at_10": 0.08304,
    "precision_at_100": 0.01059,
    "precision_at_1000": 0.00128,
    "precision_at_3": 0.22876,
    "precision_at_5": 0.15033,
    "recall_at_1": 0.25544,
    "recall_at_10": 0.41519,
    "recall_at_100": 0.52957,
    "recall_at_1000": 0.64132,
    "recall_at_3": 0.34315,
    "recall_at_5": 0.37583
  }
}