leaderboard-test-results/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.36177474402730375,
            "acc_stderr": 0.01404195794503808,
            "acc_norm": 0.41552901023890787,
            "acc_norm_stderr": 0.014401366641216395
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38179645488946423,
            "acc_stderr": 0.00484834156049215,
            "acc_norm": 0.4947221668990241,
            "acc_norm_stderr": 0.004989503417767287
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5825242718446602,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.5825242718446602,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.45849297573435505,
            "acc_stderr": 0.017818248603465554,
            "acc_norm": 0.45849297573435505,
            "acc_norm_stderr": 0.017818248603465554
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.041539484047424,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.041539484047424
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4553191489361702,
            "acc_stderr": 0.03255525359340355,
            "acc_norm": 0.4553191489361702,
            "acc_norm_stderr": 0.03255525359340355
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.45180722891566266,
            "acc_stderr": 0.03874371556587953,
            "acc_norm": 0.45180722891566266,
            "acc_norm_stderr": 0.03874371556587953
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.48231511254019294,
            "acc_stderr": 0.02838032284907713,
            "acc_norm": 0.48231511254019294,
            "acc_norm_stderr": 0.02838032284907713
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4663677130044843,
            "acc_stderr": 0.033481800170603065,
            "acc_norm": 0.4663677130044843,
            "acc_norm_stderr": 0.033481800170603065
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.46564885496183206,
            "acc_stderr": 0.043749285605997376,
            "acc_norm": 0.46564885496183206,
            "acc_norm_stderr": 0.043749285605997376
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5,
            "acc_stderr": 0.035623524993954825,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.035623524993954825
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237654,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237654
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5084033613445378,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.5084033613445378,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.025294608023986483,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.025294608023986483
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.03481904844438804,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.03481904844438804
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5,
            "acc_stderr": 0.028444006199428714,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.028444006199428714
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7307692307692307,
            "acc_stderr": 0.029058588303748845,
            "acc_norm": 0.7307692307692307,
            "acc_norm_stderr": 0.029058588303748845
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.0279404571362284,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.0279404571362284
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3699421965317919,
            "acc_stderr": 0.03681229633394319,
            "acc_norm": 0.3699421965317919,
            "acc_norm_stderr": 0.03681229633394319
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36243386243386244,
            "acc_stderr": 0.024757473902752052,
            "acc_norm": 0.36243386243386244,
            "acc_norm_stderr": 0.024757473902752052
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.038990736873573344,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.038990736873573344
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5,
            "acc_stderr": 0.026919095102908273,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.026919095102908273
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4662576687116564,
            "acc_stderr": 0.039194155450484096,
            "acc_norm": 0.4662576687116564,
            "acc_norm_stderr": 0.039194155450484096
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.027586006221607715,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.027586006221607715
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5025906735751295,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.5025906735751295,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.48807339449541287,
            "acc_stderr": 0.021431223617362223,
            "acc_norm": 0.48807339449541287,
            "acc_norm_stderr": 0.021431223617362223
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.373015873015873,
            "acc_stderr": 0.04325506042017087,
            "acc_norm": 0.373015873015873,
            "acc_norm_stderr": 0.04325506042017087
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46405228758169936,
            "acc_stderr": 0.02855582751652879,
            "acc_norm": 0.46405228758169936,
            "acc_norm_stderr": 0.02855582751652879
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968432,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968432
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4605263157894737,
            "acc_stderr": 0.04056242252249033,
            "acc_norm": 0.4605263157894737,
            "acc_norm_stderr": 0.04056242252249033
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.380718954248366,
            "acc_stderr": 0.019643801557924806,
            "acc_norm": 0.380718954248366,
            "acc_norm_stderr": 0.019643801557924806
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35815602836879434,
            "acc_stderr": 0.028602085862759426,
            "acc_norm": 0.35815602836879434,
            "acc_norm_stderr": 0.028602085862759426
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.375,
            "acc_stderr": 0.04595091388086298,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04595091388086298
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4212962962962963,
            "acc_stderr": 0.033674621388960775,
            "acc_norm": 0.4212962962962963,
            "acc_norm_stderr": 0.033674621388960775
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.21675977653631284,
            "acc_stderr": 0.013780598486443354,
            "acc_norm": 0.21675977653631284,
            "acc_norm_stderr": 0.013780598486443354
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35661764705882354,
            "acc_stderr": 0.029097209568411952,
            "acc_norm": 0.35661764705882354,
            "acc_norm_stderr": 0.029097209568411952
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5306122448979592,
            "acc_stderr": 0.031949171367580624,
            "acc_norm": 0.5306122448979592,
            "acc_norm_stderr": 0.031949171367580624
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5949367088607594,
            "acc_stderr": 0.031955147413706725,
            "acc_norm": 0.5949367088607594,
            "acc_norm_stderr": 0.031955147413706725
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3305084745762712,
            "acc_stderr": 0.012014142101842977,
            "acc_norm": 0.3305084745762712,
            "acc_norm_stderr": 0.012014142101842977
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.03503235296367993,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.03503235296367993
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.46060606060606063,
            "acc_stderr": 0.03892207016552013,
            "acc_norm": 0.46060606060606063,
            "acc_norm_stderr": 0.03892207016552013
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2839657282741738,
            "mc1_stderr": 0.015785370858396708,
            "mc2": 0.4663054587466787,
            "mc2_stderr": 0.015613323568757127
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4805194805194805,
            "acc_stderr": 0.017177301992342547,
            "acc_norm": 0.51357733175915,
            "acc_norm_stderr": 0.01718401506040145
        }
    },
"versions": { | |
"all": 0, | |
"harness|ko_arc_challenge|25": 0, | |
"harness|ko_hellaswag|10": 0, | |
"harness|ko_mmlu_world_religions|5": 1, | |
"harness|ko_mmlu_management|5": 1, | |
"harness|ko_mmlu_miscellaneous|5": 1, | |
"harness|ko_mmlu_anatomy|5": 1, | |
"harness|ko_mmlu_abstract_algebra|5": 1, | |
"harness|ko_mmlu_conceptual_physics|5": 1, | |
"harness|ko_mmlu_virology|5": 1, | |
"harness|ko_mmlu_philosophy|5": 1, | |
"harness|ko_mmlu_human_aging|5": 1, | |
"harness|ko_mmlu_human_sexuality|5": 1, | |
"harness|ko_mmlu_medical_genetics|5": 1, | |
"harness|ko_mmlu_high_school_geography|5": 1, | |
"harness|ko_mmlu_electrical_engineering|5": 1, | |
"harness|ko_mmlu_college_physics|5": 1, | |
"harness|ko_mmlu_high_school_microeconomics|5": 1, | |
"harness|ko_mmlu_high_school_macroeconomics|5": 1, | |
"harness|ko_mmlu_computer_security|5": 1, | |
"harness|ko_mmlu_global_facts|5": 1, | |
"harness|ko_mmlu_jurisprudence|5": 1, | |
"harness|ko_mmlu_high_school_chemistry|5": 1, | |
"harness|ko_mmlu_high_school_biology|5": 1, | |
"harness|ko_mmlu_marketing|5": 1, | |
"harness|ko_mmlu_clinical_knowledge|5": 1, | |
"harness|ko_mmlu_public_relations|5": 1, | |
"harness|ko_mmlu_high_school_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_physics|5": 1, | |
"harness|ko_mmlu_sociology|5": 1, | |
"harness|ko_mmlu_college_medicine|5": 1, | |
"harness|ko_mmlu_elementary_mathematics|5": 1, | |
"harness|ko_mmlu_college_biology|5": 1, | |
"harness|ko_mmlu_college_chemistry|5": 1, | |
"harness|ko_mmlu_us_foreign_policy|5": 1, | |
"harness|ko_mmlu_moral_disputes|5": 1, | |
"harness|ko_mmlu_logical_fallacies|5": 1, | |
"harness|ko_mmlu_prehistory|5": 1, | |
"harness|ko_mmlu_college_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_government_and_politics|5": 1, | |
"harness|ko_mmlu_econometrics|5": 1, | |
"harness|ko_mmlu_high_school_psychology|5": 1, | |
"harness|ko_mmlu_formal_logic|5": 1, | |
"harness|ko_mmlu_nutrition|5": 1, | |
"harness|ko_mmlu_business_ethics|5": 1, | |
"harness|ko_mmlu_international_law|5": 1, | |
"harness|ko_mmlu_astronomy|5": 1, | |
"harness|ko_mmlu_professional_psychology|5": 1, | |
"harness|ko_mmlu_professional_accounting|5": 1, | |
"harness|ko_mmlu_machine_learning|5": 1, | |
"harness|ko_mmlu_high_school_statistics|5": 1, | |
"harness|ko_mmlu_moral_scenarios|5": 1, | |
"harness|ko_mmlu_college_computer_science|5": 1, | |
"harness|ko_mmlu_high_school_computer_science|5": 1, | |
"harness|ko_mmlu_professional_medicine|5": 1, | |
"harness|ko_mmlu_security_studies|5": 1, | |
"harness|ko_mmlu_high_school_world_history|5": 1, | |
"harness|ko_mmlu_professional_law|5": 1, | |
"harness|ko_mmlu_high_school_us_history|5": 1, | |
"harness|ko_mmlu_high_school_european_history|5": 1, | |
"harness|ko_truthfulqa_mc|0": 0, | |
"harness|ko_commongen_v2|2": 1 | |
}, | |
"config_general": { | |
"model_name": "MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1", | |
"model_sha": "a64bcca1371fa2285981fc40dbd8b879857f1e2e", | |
"model_dtype": "torch.float16", | |
"lighteval_sha": "", | |
"num_few_shot_default": 0, | |
"num_fewshot_seeds": 1, | |
"override_batch_size": 1, | |
"max_samples": null | |
} | |
} |