{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 95.23,
"map_at_1": 0.20652,
"map_at_10": 0.27558,
"map_at_100": 0.28473,
"map_at_1000": 0.28577,
"map_at_3": 0.25402,
"map_at_5": 0.2668,
"mrr_at_1": 0.25223,
"mrr_at_10": 0.31966,
"mrr_at_100": 0.32664,
"mrr_at_1000": 0.32724,
"mrr_at_3": 0.30074,
"mrr_at_5": 0.31249,
"ndcg_at_1": 0.25223,
"ndcg_at_10": 0.31694,
"ndcg_at_100": 0.35662,
"ndcg_at_1000": 0.38092,
"ndcg_at_3": 0.28294,
"ndcg_at_5": 0.30049,
"precision_at_1": 0.25223,
"precision_at_10": 0.05777,
"precision_at_100": 0.00973,
"precision_at_1000": 0.0014,
"precision_at_3": 0.13397,
"precision_at_5": 0.09605,
"recall_at_1": 0.20652,
"recall_at_10": 0.39368,
"recall_at_100": 0.56485,
"recall_at_1000": 0.73292,
"recall_at_3": 0.2983,
"recall_at_5": 0.3443
}
}