{
"test": {
"accuracy": 0.7770454545454545,
"accuracy_stderr": 0.007521053263962387,
"evaluation_time": 42.98,
"f1": 0.7769290001138031,
"f1_stderr": 0.007473720123531678,
"main_score": 0.7770454545454545
},
"dataset_version": null,
"mteb_version": "0.0.2"
}