ModernBERT-base-msmarco / mteb / MTOPDomainClassification.json
{
"dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf",
"task_name": "MTOPDomainClassification",
"mteb_version": "1.25.1",
"scores": {
"test": [
{
"accuracy": 0.889124,
"f1": 0.88758,
"f1_weighted": 0.889747,
"scores_per_experiment": [
{
"accuracy": 0.854537,
"f1": 0.857721,
"f1_weighted": 0.855151
},
{
"accuracy": 0.900593,
"f1": 0.89766,
"f1_weighted": 0.901084
},
{
"accuracy": 0.901049,
"f1": 0.898648,
"f1_weighted": 0.900877
},
{
"accuracy": 0.897629,
"f1": 0.894282,
"f1_weighted": 0.898338
},
{
"accuracy": 0.891473,
"f1": 0.890253,
"f1_weighted": 0.892135
},
{
"accuracy": 0.882809,
"f1": 0.884257,
"f1_weighted": 0.885064
},
{
"accuracy": 0.887597,
"f1": 0.884408,
"f1_weighted": 0.887453
},
{
"accuracy": 0.891017,
"f1": 0.887588,
"f1_weighted": 0.890818
},
{
"accuracy": 0.891929,
"f1": 0.89255,
"f1_weighted": 0.892768
},
{
"accuracy": 0.892613,
"f1": 0.888437,
"f1_weighted": 0.893779
}
],
"main_score": 0.889124,
"hf_subset": "en",
"languages": [
"eng-Latn"
]
}
]
},
"evaluation_time": 10.538909912109375,
"kg_co2_emissions": null
}