{
  "test": {
    "accuracy": 0.586696,
    "accuracy_stderr": 0.04366294612139682,
    "ap": 0.553644880984279,
    "ap_stderr": 0.02927763434288163,
    "evaluation_time": 1493.91,
    "f1": 0.5807942097405652,
    "f1_stderr": 0.046729535979483976,
    "main_score": 0.586696
  },
  "dataset_version": null,
  "mteb_version": "0.0.2"
}