{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3703071672354949,
            "acc_stderr": 0.01411129875167495,
            "acc_norm": 0.431740614334471,
            "acc_norm_stderr": 0.014474591427196202
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3882692690699064,
            "acc_stderr": 0.004863603638367454,
            "acc_norm": 0.5113523202549293,
            "acc_norm_stderr": 0.004988495127747284
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6198830409356725,
            "acc_stderr": 0.037229657413855394,
            "acc_norm": 0.6198830409356725,
            "acc_norm_stderr": 0.037229657413855394
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6019417475728155,
            "acc_stderr": 0.048467482539772386,
            "acc_norm": 0.6019417475728155,
            "acc_norm_stderr": 0.048467482539772386
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.524904214559387,
            "acc_stderr": 0.017857770704901035,
            "acc_norm": 0.524904214559387,
            "acc_norm_stderr": 0.017857770704901035
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4888888888888889,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.4888888888888889,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.44680851063829785,
            "acc_stderr": 0.0325005368436584,
            "acc_norm": 0.44680851063829785,
            "acc_norm_stderr": 0.0325005368436584
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4036144578313253,
            "acc_stderr": 0.03819486140758397,
            "acc_norm": 0.4036144578313253,
            "acc_norm_stderr": 0.03819486140758397
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5144694533762058,
            "acc_stderr": 0.02838619808417768,
            "acc_norm": 0.5144694533762058,
            "acc_norm_stderr": 0.02838619808417768
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5022421524663677,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.5022421524663677,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.549618320610687,
            "acc_stderr": 0.04363643698524779,
            "acc_norm": 0.549618320610687,
            "acc_norm_stderr": 0.04363643698524779
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6262626262626263,
            "acc_stderr": 0.034468977386593325,
            "acc_norm": 0.6262626262626263,
            "acc_norm_stderr": 0.034468977386593325
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5724137931034483,
            "acc_stderr": 0.04122737111370332,
            "acc_norm": 0.5724137931034483,
            "acc_norm_stderr": 0.04122737111370332
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.04533838195929775,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.04533838195929775
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5168067226890757,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.5168067226890757,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.46923076923076923,
            "acc_stderr": 0.025302958890850154,
            "acc_norm": 0.46923076923076923,
            "acc_norm_stderr": 0.025302958890850154
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4236453201970443,
            "acc_stderr": 0.034767257476490364,
            "acc_norm": 0.4236453201970443,
            "acc_norm_stderr": 0.034767257476490364
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5064516129032258,
            "acc_stderr": 0.02844163823354051,
            "acc_norm": 0.5064516129032258,
            "acc_norm_stderr": 0.02844163823354051
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6837606837606838,
            "acc_stderr": 0.03046365674734026,
            "acc_norm": 0.6837606837606838,
            "acc_norm_stderr": 0.03046365674734026
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119898,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119898
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.029116617606083018,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.029116617606083018
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6716417910447762,
            "acc_stderr": 0.033206858897443244,
            "acc_norm": 0.6716417910447762,
            "acc_norm_stderr": 0.033206858897443244
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4393063583815029,
            "acc_stderr": 0.037842719328874674,
            "acc_norm": 0.4393063583815029,
            "acc_norm_stderr": 0.037842719328874674
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3439153439153439,
            "acc_stderr": 0.024464426625596433,
            "acc_norm": 0.3439153439153439,
            "acc_norm_stderr": 0.024464426625596433
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3680555555555556,
            "acc_stderr": 0.04032999053960719,
            "acc_norm": 0.3680555555555556,
            "acc_norm_stderr": 0.04032999053960719
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5289017341040463,
            "acc_stderr": 0.02687408588351835,
            "acc_norm": 0.5289017341040463,
            "acc_norm_stderr": 0.02687408588351835
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.43558282208588955,
            "acc_stderr": 0.03895632464138937,
            "acc_norm": 0.43558282208588955,
            "acc_norm_stderr": 0.03895632464138937
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.027701228468542595,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.027701228468542595
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5803108808290155,
            "acc_stderr": 0.035615873276858834,
            "acc_norm": 0.5803108808290155,
            "acc_norm_stderr": 0.035615873276858834
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537316,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537316
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5577981651376147,
            "acc_stderr": 0.021293613207520202,
            "acc_norm": 0.5577981651376147,
            "acc_norm_stderr": 0.021293613207520202
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.028541722692618874,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.028541722692618874
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4934210526315789,
            "acc_stderr": 0.040685900502249704,
            "acc_norm": 0.4934210526315789,
            "acc_norm_stderr": 0.040685900502249704
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3758169934640523,
            "acc_stderr": 0.019594021136577447,
            "acc_norm": 0.3758169934640523,
            "acc_norm_stderr": 0.019594021136577447
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.028267657482650144,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.028267657482650144
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.375,
            "acc_stderr": 0.04595091388086298,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04595091388086298
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.0317987634217685,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.0317987634217685
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.01489339173524962,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.01489339173524962
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.375,
            "acc_stderr": 0.029408372932278746,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.029408372932278746
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.33877551020408164,
            "acc_stderr": 0.03029950656215418,
            "acc_norm": 0.33877551020408164,
            "acc_norm_stderr": 0.03029950656215418
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.42616033755274263,
            "acc_stderr": 0.032190357031317736,
            "acc_norm": 0.42616033755274263,
            "acc_norm_stderr": 0.032190357031317736
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28748370273794005,
            "acc_stderr": 0.011559337355708502,
            "acc_norm": 0.28748370273794005,
            "acc_norm_stderr": 0.011559337355708502
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3480392156862745,
            "acc_stderr": 0.03343311240488418,
            "acc_norm": 0.3480392156862745,
            "acc_norm_stderr": 0.03343311240488418
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.48484848484848486,
            "acc_stderr": 0.03902551007374448,
            "acc_norm": 0.48484848484848486,
            "acc_norm_stderr": 0.03902551007374448
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.28886168910648713,
            "mc1_stderr": 0.0158663464013843,
            "mc2": 0.46502937106374664,
            "mc2_stderr": 0.015372195450409798
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4852420306965762,
            "acc_stderr": 0.017182864434998564,
            "acc_norm": 0.5796930342384888,
            "acc_norm_stderr": 0.01697059828117771
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "EnumaInc/llama-8b-ko-slimorca-45000",
        "model_sha": "eab90d20ed140a8eabe19d00d3c4af2ed9cffd08",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}