{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 300.62,
"map_at_1": 0.09279,
"map_at_10": 0.13822,
"map_at_100": 0.14533,
"map_at_1000": 0.1465,
"map_at_3": 0.12396,
"map_at_5": 0.13214,
"mrr_at_1": 0.11149,
"mrr_at_10": 0.16139,
"mrr_at_100": 0.16872,
"mrr_at_1000": 0.16964,
"mrr_at_3": 0.14613,
"mrr_at_5": 0.15486,
"ndcg_at_1": 0.11149,
"ndcg_at_10": 0.1682,
"ndcg_at_100": 0.2073,
"ndcg_at_1000": 0.23894,
"ndcg_at_3": 0.1411,
"ndcg_at_5": 0.15404,
"precision_at_1": 0.11149,
"precision_at_10": 0.03063,
"precision_at_100": 0.00587,
"precision_at_1000": 0.001,
"precision_at_3": 0.06699,
"precision_at_5": 0.04928,
"recall_at_1": 0.09279,
"recall_at_10": 0.23745,
"recall_at_100": 0.41873,
"recall_at_1000": 0.64982,
"recall_at_3": 0.16152,
"recall_at_5": 0.19409
}
}