ModernBERT-base-msmarco / mteb / AmazonPolarityClassification.json
{
  "dataset_revision": "e2d317d38cd51312af73b3d32a06d1a08b442046",
  "task_name": "AmazonPolarityClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.671713,
        "f1": 0.669443,
        "f1_weighted": 0.669443,
        "ap": 0.617296,
        "ap_weighted": 0.617296,
        "scores_per_experiment": [
          {
            "accuracy": 0.730195,
            "f1": 0.729644,
            "f1_weighted": 0.729644,
            "ap": 0.673348,
            "ap_weighted": 0.673348
          },
          {
            "accuracy": 0.648605,
            "f1": 0.647126,
            "f1_weighted": 0.647126,
            "ap": 0.593854,
            "ap_weighted": 0.593854
          },
          {
            "accuracy": 0.720182,
            "f1": 0.716345,
            "f1_weighted": 0.716345,
            "ap": 0.649422,
            "ap_weighted": 0.649422
          },
          {
            "accuracy": 0.717512,
            "f1": 0.715561,
            "f1_weighted": 0.715561,
            "ap": 0.665462,
            "ap_weighted": 0.665462
          },
          {
            "accuracy": 0.660528,
            "f1": 0.659856,
            "f1_weighted": 0.659856,
            "ap": 0.60393,
            "ap_weighted": 0.60393
          },
          {
            "accuracy": 0.66763,
            "f1": 0.66655,
            "f1_weighted": 0.66655,
            "ap": 0.609044,
            "ap_weighted": 0.609044
          },
          {
            "accuracy": 0.67263,
            "f1": 0.670559,
            "f1_weighted": 0.670559,
            "ap": 0.612038,
            "ap_weighted": 0.612038
          },
          {
            "accuracy": 0.677455,
            "f1": 0.674938,
            "f1_weighted": 0.674938,
            "ap": 0.626943,
            "ap_weighted": 0.626943
          },
          {
            "accuracy": 0.582143,
            "f1": 0.574911,
            "f1_weighted": 0.574911,
            "ap": 0.546423,
            "ap_weighted": 0.546423
          },
          {
            "accuracy": 0.64025,
            "f1": 0.638938,
            "f1_weighted": 0.638938,
            "ap": 0.592492,
            "ap_weighted": 0.592492
          }
        ],
        "main_score": 0.671713,
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 439.54625058174133,
  "kg_co2_emissions": null
}