{ "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6", "task_name": "MassiveIntentClassification", "mteb_version": "1.25.1", "scores": { "test": [ { "accuracy": 0.652354, "f1": 0.635945, "f1_weighted": 0.650395, "scores_per_experiment": [ { "accuracy": 0.644923, "f1": 0.639513, "f1_weighted": 0.640635 }, { "accuracy": 0.677539, "f1": 0.652575, "f1_weighted": 0.67554 }, { "accuracy": 0.649294, "f1": 0.630992, "f1_weighted": 0.645129 }, { "accuracy": 0.680565, "f1": 0.659978, "f1_weighted": 0.675892 }, { "accuracy": 0.653329, "f1": 0.637184, "f1_weighted": 0.649634 }, { "accuracy": 0.641896, "f1": 0.624381, "f1_weighted": 0.640526 }, { "accuracy": 0.642233, "f1": 0.633383, "f1_weighted": 0.643923 }, { "accuracy": 0.638534, "f1": 0.618431, "f1_weighted": 0.633252 }, { "accuracy": 0.64694, "f1": 0.63405, "f1_weighted": 0.649656 }, { "accuracy": 0.648285, "f1": 0.628963, "f1_weighted": 0.649767 } ], "main_score": 0.652354, "hf_subset": "en", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 11.81202483177185, "kg_co2_emissions": null }