{
  "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba",
  "task_name": "MTOPIntentClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.656475,
        "f1": 0.490948,
        "f1_weighted": 0.693994,
        "scores_per_experiment": [
          {
            "accuracy": 0.641587,
            "f1": 0.456483,
            "f1_weighted": 0.683725
          },
          {
            "accuracy": 0.671911,
            "f1": 0.48881,
            "f1_weighted": 0.709701
          },
          {
            "accuracy": 0.673279,
            "f1": 0.501567,
            "f1_weighted": 0.711032
          },
          {
            "accuracy": 0.656407,
            "f1": 0.514271,
            "f1_weighted": 0.692525
          },
          {
            "accuracy": 0.639763,
            "f1": 0.480901,
            "f1_weighted": 0.671092
          },
          {
            "accuracy": 0.645691,
            "f1": 0.501634,
            "f1_weighted": 0.685544
          },
          {
            "accuracy": 0.663703,
            "f1": 0.479191,
            "f1_weighted": 0.704379
          },
          {
            "accuracy": 0.674647,
            "f1": 0.516005,
            "f1_weighted": 0.709525
          },
          {
            "accuracy": 0.650935,
            "f1": 0.480141,
            "f1_weighted": 0.689081
          },
          {
            "accuracy": 0.646831,
            "f1": 0.490481,
            "f1_weighted": 0.683338
          }
        ],
        "main_score": 0.656475,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 13.826844215393066,
  "kg_co2_emissions": null
}