leaderboard-test-results/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4035836177474403,
            "acc_stderr": 0.014337158914268436,
            "acc_norm": 0.45563139931740615,
            "acc_norm_stderr": 0.014553749939306864
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42162915753833896,
            "acc_stderr": 0.004928105880776079,
            "acc_norm": 0.566122286397132,
            "acc_norm_stderr": 0.004945956744943813
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5029239766081871,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.5029239766081871,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5440613026819924,
            "acc_stderr": 0.017810403925435342,
            "acc_norm": 0.5440613026819924,
            "acc_norm_stderr": 0.017810403925435342
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.04309732901036354,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.04309732901036354
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.031967586978353627,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.031967586978353627
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.038515976837185335,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.038515976837185335
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.49517684887459806,
            "acc_stderr": 0.028396770444111298,
            "acc_norm": 0.49517684887459806,
            "acc_norm_stderr": 0.028396770444111298
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.484304932735426,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.484304932735426,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5303030303030303,
            "acc_stderr": 0.03555804051763929,
            "acc_norm": 0.5303030303030303,
            "acc_norm_stderr": 0.03555804051763929
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.41379310344827586,
            "acc_stderr": 0.041042692118062316,
            "acc_norm": 0.41379310344827586,
            "acc_norm_stderr": 0.041042692118062316
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.0433643270799318,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.0433643270799318
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.032145368597886394,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.032145368597886394
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.44358974358974357,
            "acc_stderr": 0.025189149894764194,
            "acc_norm": 0.44358974358974357,
            "acc_norm_stderr": 0.025189149894764194
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.048262172941398944,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.048262172941398944
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.034304624161038716,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.034304624161038716
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4483870967741935,
            "acc_stderr": 0.028292056830112735,
            "acc_norm": 0.4483870967741935,
            "acc_norm_stderr": 0.028292056830112735
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6196581196581197,
            "acc_stderr": 0.031804252043840985,
            "acc_norm": 0.6196581196581197,
            "acc_norm_stderr": 0.031804252043840985
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4,
            "acc_stderr": 0.030151134457776285,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.030151134457776285
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5727272727272728,
            "acc_stderr": 0.047381987035454834,
            "acc_norm": 0.5727272727272728,
            "acc_norm_stderr": 0.047381987035454834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.026719240783712166,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.026719240783712166
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5970149253731343,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.5970149253731343,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.0372424959581773,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.0372424959581773
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.023456037383982022,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.023456037383982022
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3680555555555556,
            "acc_stderr": 0.04032999053960718,
            "acc_norm": 0.3680555555555556,
            "acc_norm_stderr": 0.04032999053960718
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5144508670520231,
            "acc_stderr": 0.026907849856282542,
            "acc_norm": 0.5144508670520231,
            "acc_norm_stderr": 0.026907849856282542
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5214723926380368,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.5214723926380368,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5284974093264249,
            "acc_stderr": 0.03602573571288442,
            "acc_norm": 0.5284974093264249,
            "acc_norm_stderr": 0.03602573571288442
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.563302752293578,
            "acc_stderr": 0.021264820158714205,
            "acc_norm": 0.563302752293578,
            "acc_norm_stderr": 0.021264820158714205
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.39869281045751637,
            "acc_stderr": 0.028036092273891765,
            "acc_norm": 0.39869281045751637,
            "acc_norm_stderr": 0.028036092273891765
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6859504132231405,
            "acc_stderr": 0.04236964753041018,
            "acc_norm": 0.6859504132231405,
            "acc_norm_stderr": 0.04236964753041018
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.41013071895424835,
            "acc_stderr": 0.019898412717635892,
            "acc_norm": 0.41013071895424835,
            "acc_norm_stderr": 0.019898412717635892
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199492,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199492
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.031141447823536048,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.031141447823536048
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098424,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098424
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33455882352941174,
            "acc_stderr": 0.028661996202335314,
            "acc_norm": 0.33455882352941174,
            "acc_norm_stderr": 0.028661996202335314
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4857142857142857,
            "acc_stderr": 0.031996152328062875,
            "acc_norm": 0.4857142857142857,
            "acc_norm_stderr": 0.031996152328062875
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6540084388185654,
            "acc_stderr": 0.03096481058878671,
            "acc_norm": 0.6540084388185654,
            "acc_norm_stderr": 0.03096481058878671
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3683181225554107,
            "acc_stderr": 0.012319403369564642,
            "acc_norm": 0.3683181225554107,
            "acc_norm_stderr": 0.012319403369564642
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606786,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606786
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.03898531605579418,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.03898531605579418
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.448398942069094,
            "mc2_stderr": 0.015159190515111855
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4923258559622196,
            "acc_stderr": 0.01718832921965428,
            "acc_norm": 0.6221959858323495,
            "acc_norm_stderr": 0.01666908284069498
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Kaeri-Jenti/LDCC-with-openorca-and-korca",
        "model_sha": "7f845005dc24e13e4fe380e32aa1b0f649b85743",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}