ModernBERT-base-msmarco / mteb / AmazonCounterfactualClassification.json
{
"dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
"task_name": "AmazonCounterfactualClassification",
"mteb_version": "1.25.1",
"scores": {
"test": [
{
"accuracy": 0.65997,
"f1": 0.535951,
"f1_weighted": 0.725304,
"ap": 0.163093,
"ap_weighted": 0.163093,
"scores_per_experiment": [
{
"accuracy": 0.644678,
"f1": 0.531347,
"f1_weighted": 0.713782,
"ap": 0.166602,
"ap_weighted": 0.166602
},
{
"accuracy": 0.608696,
"f1": 0.518256,
"f1_weighted": 0.683489,
"ap": 0.176106,
"ap_weighted": 0.176106
},
{
"accuracy": 0.643928,
"f1": 0.529901,
"f1_weighted": 0.713177,
"ap": 0.164896,
"ap_weighted": 0.164896
},
{
"accuracy": 0.641679,
"f1": 0.51983,
"f1_weighted": 0.711307,
"ap": 0.151661,
"ap_weighted": 0.151661
},
{
"accuracy": 0.668666,
"f1": 0.544738,
"f1_weighted": 0.732766,
"ap": 0.169199,
"ap_weighted": 0.169199
},
{
"accuracy": 0.707646,
"f1": 0.550963,
"f1_weighted": 0.760934,
"ap": 0.15327,
"ap_weighted": 0.15327
},
{
"accuracy": 0.654423,
"f1": 0.525677,
"f1_weighted": 0.721296,
"ap": 0.151452,
"ap_weighted": 0.151452
},
{
"accuracy": 0.664918,
"f1": 0.539092,
"f1_weighted": 0.729726,
"ap": 0.163361,
"ap_weighted": 0.163361
},
{
"accuracy": 0.696402,
"f1": 0.563025,
"f1_weighted": 0.754132,
"ap": 0.176998,
"ap_weighted": 0.176998
},
{
"accuracy": 0.668666,
"f1": 0.536679,
"f1_weighted": 0.732435,
"ap": 0.15738,
"ap_weighted": 0.15738
}
],
"main_score": 0.65997,
"hf_subset": "en-ext",
"languages": [
"eng-Latn"
]
},
{
"accuracy": 0.64194,
"f1": 0.585969,
"f1_weighted": 0.677482,
"ap": 0.280748,
"ap_weighted": 0.280748,
"scores_per_experiment": [
{
"accuracy": 0.659701,
"f1": 0.606678,
"f1_weighted": 0.694619,
"ap": 0.299469,
"ap_weighted": 0.299469
},
{
"accuracy": 0.61194,
"f1": 0.564082,
"f1_weighted": 0.652038,
"ap": 0.268001,
"ap_weighted": 0.268001
},
{
"accuracy": 0.592537,
"f1": 0.553977,
"f1_weighted": 0.633838,
"ap": 0.269473,
"ap_weighted": 0.269473
},
{
"accuracy": 0.61194,
"f1": 0.565044,
"f1_weighted": 0.652016,
"ap": 0.269697,
"ap_weighted": 0.269697
},
{
"accuracy": 0.668657,
"f1": 0.603271,
"f1_weighted": 0.70135,
"ap": 0.285582,
"ap_weighted": 0.285582
},
{
"accuracy": 0.625373,
"f1": 0.56863,
"f1_weighted": 0.663902,
"ap": 0.26389,
"ap_weighted": 0.26389
},
{
"accuracy": 0.744776,
"f1": 0.648233,
"f1_weighted": 0.760454,
"ap": 0.307423,
"ap_weighted": 0.307423
},
{
"accuracy": 0.667164,
"f1": 0.613827,
"f1_weighted": 0.701223,
"ap": 0.30575,
"ap_weighted": 0.30575
},
{
"accuracy": 0.591045,
"f1": 0.547427,
"f1_weighted": 0.632985,
"ap": 0.259001,
"ap_weighted": 0.259001
},
{
"accuracy": 0.646269,
"f1": 0.588519,
"f1_weighted": 0.682391,
"ap": 0.279194,
"ap_weighted": 0.279194
}
],
"main_score": 0.64194,
"hf_subset": "en",
"languages": [
"eng-Latn"
]
}
]
},
"evaluation_time": 18.05203342437744,
"kg_co2_emissions": null
}