SGPT-125M-weightedmean-msmarco-specb-bitfit / evaluation/mteb/CQADupstackWebmastersRetrieval.json
Muennighoff's picture
Add MTEB evaluation
154c4e9
raw
history blame
937 Bytes
{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 62.83,
"map_at_1": 0.1739,
"map_at_10": 0.23058,
"map_at_100": 0.24445,
"map_at_1000": 0.24638,
"map_at_3": 0.21037,
"map_at_5": 0.21966,
"mrr_at_1": 0.1996,
"mrr_at_10": 0.26301,
"mrr_at_100": 0.27297,
"mrr_at_1000": 0.27375,
"mrr_at_3": 0.24341,
"mrr_at_5": 0.25339,
"ndcg_at_1": 0.1996,
"ndcg_at_10": 0.27249,
"ndcg_at_100": 0.32997,
"ndcg_at_1000": 0.36359,
"ndcg_at_3": 0.23519,
"ndcg_at_5": 0.24915,
"precision_at_1": 0.1996,
"precision_at_10": 0.05356,
"precision_at_100": 0.01198,
"precision_at_1000": 0.00204,
"precision_at_3": 0.10738,
"precision_at_5": 0.07905,
"recall_at_1": 0.1739,
"recall_at_10": 0.35255,
"recall_at_100": 0.61351,
"recall_at_1000": 0.84395,
"recall_at_3": 0.25194,
"recall_at_5": 0.28546
}
}