{
"dataset_version": null,
"mteb_version": "0.0.2",
"validation": {
"evaluation_time": 6441.95,
"map_at_1": 0.14442,
"map_at_10": 0.22932,
"map_at_100": 0.24132,
"map_at_1000": 0.24213,
"map_at_3": 0.20002,
"map_at_5": 0.21636,
"mrr_at_1": 0.14842,
"mrr_at_10": 0.23416,
"mrr_at_100": 0.24594,
"mrr_at_1000": 0.24669,
"mrr_at_3": 0.20494,
"mrr_at_5": 0.2214,
"ndcg_at_1": 0.14842,
"ndcg_at_10": 0.27975,
"ndcg_at_100": 0.34143,
"ndcg_at_1000": 0.3637,
"ndcg_at_3": 0.21944,
"ndcg_at_5": 0.24881,
"precision_at_1": 0.14842,
"precision_at_10": 0.04537,
"precision_at_100": 0.00767,
"precision_at_1000": 0.00096,
"precision_at_3": 0.09322,
"precision_at_5": 0.07074,
"recall_at_1": 0.14442,
"recall_at_10": 0.43557,
"recall_at_100": 0.72904,
"recall_at_1000": 0.90407,
"recall_at_3": 0.27088,
"recall_at_5": 0.34144
}
}