{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.37627986348122866,
            "acc_stderr": 0.014157022555407166,
            "acc_norm": 0.4300341296928328,
            "acc_norm_stderr": 0.014467631559137998
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.407787293367855,
            "acc_stderr": 0.004904189257891276,
            "acc_norm": 0.5450109539932284,
            "acc_norm_stderr": 0.004969521827957934
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5321637426900585,
            "acc_stderr": 0.03826882417660368,
            "acc_norm": 0.5321637426900585,
            "acc_norm_stderr": 0.03826882417660368
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4563106796116505,
            "acc_stderr": 0.049318019942204146,
            "acc_norm": 0.4563106796116505,
            "acc_norm_stderr": 0.049318019942204146
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5019157088122606,
            "acc_stderr": 0.017879832259026677,
            "acc_norm": 0.5019157088122606,
            "acc_norm_stderr": 0.017879832259026677
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3574468085106383,
            "acc_stderr": 0.03132941789476425,
            "acc_norm": 0.3574468085106383,
            "acc_norm_stderr": 0.03132941789476425
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.43373493975903615,
            "acc_stderr": 0.03858158940685515,
            "acc_norm": 0.43373493975903615,
            "acc_norm_stderr": 0.03858158940685515
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.02835563356832818,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.02835563356832818
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3542600896860987,
            "acc_stderr": 0.03210062154134986,
            "acc_norm": 0.3542600896860987,
            "acc_norm_stderr": 0.03210062154134986
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5252525252525253,
            "acc_stderr": 0.03557806245087314,
            "acc_norm": 0.5252525252525253,
            "acc_norm_stderr": 0.03557806245087314
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4,
            "acc_stderr": 0.04082482904638628,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04082482904638628
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.04440521906179327,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.04440521906179327
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.37815126050420167,
            "acc_stderr": 0.031499305777849054,
            "acc_norm": 0.37815126050420167,
            "acc_norm_stderr": 0.031499305777849054
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.36666666666666664,
            "acc_stderr": 0.024433016466052445,
            "acc_norm": 0.36666666666666664,
            "acc_norm_stderr": 0.024433016466052445
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.04750077341199984,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.04750077341199984
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.028327743091561056,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.028327743091561056
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5811965811965812,
            "acc_stderr": 0.03232128912157792,
            "acc_norm": 0.5811965811965812,
            "acc_norm_stderr": 0.03232128912157792
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.44528301886792454,
            "acc_stderr": 0.03058805297427065,
            "acc_norm": 0.44528301886792454,
            "acc_norm_stderr": 0.03058805297427065
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.04782001791380063,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.04782001791380063
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844072,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844072
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5771144278606966,
            "acc_stderr": 0.034932317774212816,
            "acc_norm": 0.5771144278606966,
            "acc_norm_stderr": 0.034932317774212816
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3468208092485549,
            "acc_stderr": 0.03629146670159664,
            "acc_norm": 0.3468208092485549,
            "acc_norm_stderr": 0.03629146670159664
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29894179894179895,
            "acc_stderr": 0.023577604791655795,
            "acc_norm": 0.29894179894179895,
            "acc_norm_stderr": 0.023577604791655795
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932269,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932269
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4190751445086705,
            "acc_stderr": 0.026564178111422622,
            "acc_norm": 0.4190751445086705,
            "acc_norm_stderr": 0.026564178111422622
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3496932515337423,
            "acc_stderr": 0.037466683254700206,
            "acc_norm": 0.3496932515337423,
            "acc_norm_stderr": 0.037466683254700206
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.44753086419753085,
            "acc_stderr": 0.02766713856942271,
            "acc_norm": 0.44753086419753085,
            "acc_norm_stderr": 0.02766713856942271
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.42487046632124353,
            "acc_stderr": 0.0356747133521254,
            "acc_norm": 0.42487046632124353,
            "acc_norm_stderr": 0.0356747133521254
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.042270544512321984,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.042270544512321984
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.46055045871559636,
            "acc_stderr": 0.021370494609995096,
            "acc_norm": 0.46055045871559636,
            "acc_norm_stderr": 0.021370494609995096
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235173,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235173
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.39869281045751637,
            "acc_stderr": 0.02803609227389177,
            "acc_norm": 0.39869281045751637,
            "acc_norm_stderr": 0.02803609227389177
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5289256198347108,
            "acc_stderr": 0.04556710331269498,
            "acc_norm": 0.5289256198347108,
            "acc_norm_stderr": 0.04556710331269498
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.039777499346220734,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.039777499346220734
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.315359477124183,
            "acc_stderr": 0.018798086284886887,
            "acc_norm": 0.315359477124183,
            "acc_norm_stderr": 0.018798086284886887
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30141843971631205,
            "acc_stderr": 0.027374128882631157,
            "acc_norm": 0.30141843971631205,
            "acc_norm_stderr": 0.027374128882631157
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.038946411200447915,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.038946411200447915
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.030998666304560534,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.030998666304560534
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.22058823529411764,
            "acc_stderr": 0.025187786660227276,
            "acc_norm": 0.22058823529411764,
            "acc_norm_stderr": 0.025187786660227276
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.44081632653061226,
            "acc_stderr": 0.03178419114175363,
            "acc_norm": 0.44081632653061226,
            "acc_norm_stderr": 0.03178419114175363
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5063291139240507,
            "acc_stderr": 0.03254462010767859,
            "acc_norm": 0.5063291139240507,
            "acc_norm_stderr": 0.03254462010767859
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2920469361147327,
            "acc_stderr": 0.011613349136271824,
            "acc_norm": 0.2920469361147327,
            "acc_norm_stderr": 0.011613349136271824
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.03441190023482465,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.03441190023482465
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.03895658065271846,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.03895658065271846
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.25703794369645044,
            "mc1_stderr": 0.01529807750948508,
            "mc2": 0.42398241596571024,
            "mc2_stderr": 0.014807345195706319
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4793388429752066,
            "acc_stderr": 0.017175671279836442,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.01705775370216029
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v4",
        "model_sha": "2d3a564cd23d0e97bb0f3354a148ef57e313661a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}