{ "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba", "task_name": "MTOPIntentClassification", "mteb_version": "1.25.1", "scores": { "test": [ { "accuracy": 0.656475, "f1": 0.490948, "f1_weighted": 0.693994, "scores_per_experiment": [ { "accuracy": 0.641587, "f1": 0.456483, "f1_weighted": 0.683725 }, { "accuracy": 0.671911, "f1": 0.48881, "f1_weighted": 0.709701 }, { "accuracy": 0.673279, "f1": 0.501567, "f1_weighted": 0.711032 }, { "accuracy": 0.656407, "f1": 0.514271, "f1_weighted": 0.692525 }, { "accuracy": 0.639763, "f1": 0.480901, "f1_weighted": 0.671092 }, { "accuracy": 0.645691, "f1": 0.501634, "f1_weighted": 0.685544 }, { "accuracy": 0.663703, "f1": 0.479191, "f1_weighted": 0.704379 }, { "accuracy": 0.674647, "f1": 0.516005, "f1_weighted": 0.709525 }, { "accuracy": 0.650935, "f1": 0.480141, "f1_weighted": 0.689081 }, { "accuracy": 0.646831, "f1": 0.490481, "f1_weighted": 0.683338 } ], "main_score": 0.656475, "hf_subset": "en", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 13.826844215393066, "kg_co2_emissions": null }