{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3165529010238908,
            "acc_stderr": 0.013592431519068082,
            "acc_norm": 0.3856655290102389,
            "acc_norm_stderr": 0.014224250973257174
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36456881099382593,
            "acc_stderr": 0.004803253812881047,
            "acc_norm": 0.4691296554471221,
            "acc_norm_stderr": 0.004980262025472489
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.43859649122807015,
            "acc_stderr": 0.038057975055904594,
            "acc_norm": 0.43859649122807015,
            "acc_norm_stderr": 0.038057975055904594
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5145631067961165,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.5145631067961165,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.44316730523627074,
            "acc_stderr": 0.01776408503534839,
            "acc_norm": 0.44316730523627074,
            "acc_norm_stderr": 0.01776408503534839
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.04094376269996794,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.04094376269996794
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.03196758697835363,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.03196758697835363
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.43373493975903615,
            "acc_stderr": 0.03858158940685515,
            "acc_norm": 0.43373493975903615,
            "acc_norm_stderr": 0.03858158940685515
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3890675241157556,
            "acc_stderr": 0.027690337536485376,
            "acc_norm": 0.3890675241157556,
            "acc_norm_stderr": 0.027690337536485376
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4260089686098655,
            "acc_stderr": 0.0331883328621728,
            "acc_norm": 0.4260089686098655,
            "acc_norm_stderr": 0.0331883328621728
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.04338920305792401,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.04338920305792401
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.48484848484848486,
            "acc_stderr": 0.03560716516531061,
            "acc_norm": 0.48484848484848486,
            "acc_norm_stderr": 0.03560716516531061
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.36551724137931035,
            "acc_stderr": 0.04013124195424386,
            "acc_norm": 0.36551724137931035,
            "acc_norm_stderr": 0.04013124195424386
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237656,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237656
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3949579831932773,
            "acc_stderr": 0.031753678460966245,
            "acc_norm": 0.3949579831932773,
            "acc_norm_stderr": 0.031753678460966245
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4025641025641026,
            "acc_stderr": 0.024864995159767766,
            "acc_norm": 0.4025641025641026,
            "acc_norm_stderr": 0.024864995159767766
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.43548387096774194,
            "acc_stderr": 0.02820622559150274,
            "acc_norm": 0.43548387096774194,
            "acc_norm_stderr": 0.02820622559150274
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6367521367521367,
            "acc_stderr": 0.03150712523091265,
            "acc_norm": 0.6367521367521367,
            "acc_norm_stderr": 0.03150712523091265
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39245283018867927,
            "acc_stderr": 0.03005258057955784,
            "acc_norm": 0.39245283018867927,
            "acc_norm_stderr": 0.03005258057955784
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4636363636363636,
            "acc_stderr": 0.047764491623961985,
            "acc_norm": 0.4636363636363636,
            "acc_norm_stderr": 0.047764491623961985
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.02831753349606648,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.02831753349606648
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5074626865671642,
            "acc_stderr": 0.03535140084276719,
            "acc_norm": 0.5074626865671642,
            "acc_norm_stderr": 0.03535140084276719
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3179190751445087,
            "acc_stderr": 0.03550683989165582,
            "acc_norm": 0.3179190751445087,
            "acc_norm_stderr": 0.03550683989165582
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.02397386199899207,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.02397386199899207
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.03773809990686934,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.03773809990686934
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.42485549132947975,
            "acc_stderr": 0.026613350840261736,
            "acc_norm": 0.42485549132947975,
            "acc_norm_stderr": 0.026613350840261736
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4110429447852761,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.4110429447852761,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.42901234567901236,
            "acc_stderr": 0.027538925613470863,
            "acc_norm": 0.42901234567901236,
            "acc_norm_stderr": 0.027538925613470863
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.43005181347150256,
            "acc_stderr": 0.03572954333144808,
            "acc_norm": 0.43005181347150256,
            "acc_norm_stderr": 0.03572954333144808
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.043391383225798594,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.043391383225798594
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.44770642201834865,
            "acc_stderr": 0.021319754962425462,
            "acc_norm": 0.44770642201834865,
            "acc_norm_stderr": 0.021319754962425462
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.04073524322147126,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.04073524322147126
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.43137254901960786,
            "acc_stderr": 0.02835895631342355,
            "acc_norm": 0.43137254901960786,
            "acc_norm_stderr": 0.02835895631342355
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6776859504132231,
            "acc_stderr": 0.042664163633521685,
            "acc_norm": 0.6776859504132231,
            "acc_norm_stderr": 0.042664163633521685
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336285,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336285
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.31699346405228757,
            "acc_stderr": 0.018824219512706214,
            "acc_norm": 0.31699346405228757,
            "acc_norm_stderr": 0.018824219512706214
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32978723404255317,
            "acc_stderr": 0.0280459469420424,
            "acc_norm": 0.32978723404255317,
            "acc_norm_stderr": 0.0280459469420424
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285714,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285714
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.03293377139415191,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.03293377139415191
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.014333522059217892,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.014333522059217892
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3897058823529412,
            "acc_stderr": 0.0296246635811597,
            "acc_norm": 0.3897058823529412,
            "acc_norm_stderr": 0.0296246635811597
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.43673469387755104,
            "acc_stderr": 0.03175195237583322,
            "acc_norm": 0.43673469387755104,
            "acc_norm_stderr": 0.03175195237583322
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4641350210970464,
            "acc_stderr": 0.03246338898055659,
            "acc_norm": 0.4641350210970464,
            "acc_norm_stderr": 0.03246338898055659
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3005215123859192,
            "acc_stderr": 0.011709918883039131,
            "acc_norm": 0.3005215123859192,
            "acc_norm_stderr": 0.011709918883039131
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.03308611113236434,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.03308611113236434
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.32727272727272727,
            "acc_stderr": 0.03663974994391243,
            "acc_norm": 0.32727272727272727,
            "acc_norm_stderr": 0.03663974994391243
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2607099143206854,
            "mc1_stderr": 0.015368841620766379,
            "mc2": 0.44720320938084884,
            "mc2_stderr": 0.015529246019817096
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.40613931523022434,
            "acc_stderr": 0.016884749503191392,
            "acc_norm": 0.4639905548996458,
            "acc_norm_stderr": 0.017145715365486657
        }
    },
"versions": { |
|
"all": 0, |
|
"harness|ko_arc_challenge|25": 0, |
|
"harness|ko_hellaswag|10": 0, |
|
"harness|ko_mmlu_world_religions|5": 1, |
|
"harness|ko_mmlu_management|5": 1, |
|
"harness|ko_mmlu_miscellaneous|5": 1, |
|
"harness|ko_mmlu_anatomy|5": 1, |
|
"harness|ko_mmlu_abstract_algebra|5": 1, |
|
"harness|ko_mmlu_conceptual_physics|5": 1, |
|
"harness|ko_mmlu_virology|5": 1, |
|
"harness|ko_mmlu_philosophy|5": 1, |
|
"harness|ko_mmlu_human_aging|5": 1, |
|
"harness|ko_mmlu_human_sexuality|5": 1, |
|
"harness|ko_mmlu_medical_genetics|5": 1, |
|
"harness|ko_mmlu_high_school_geography|5": 1, |
|
"harness|ko_mmlu_electrical_engineering|5": 1, |
|
"harness|ko_mmlu_college_physics|5": 1, |
|
"harness|ko_mmlu_high_school_microeconomics|5": 1, |
|
"harness|ko_mmlu_high_school_macroeconomics|5": 1, |
|
"harness|ko_mmlu_computer_security|5": 1, |
|
"harness|ko_mmlu_global_facts|5": 1, |
|
"harness|ko_mmlu_jurisprudence|5": 1, |
|
"harness|ko_mmlu_high_school_chemistry|5": 1, |
|
"harness|ko_mmlu_high_school_biology|5": 1, |
|
"harness|ko_mmlu_marketing|5": 1, |
|
"harness|ko_mmlu_clinical_knowledge|5": 1, |
|
"harness|ko_mmlu_public_relations|5": 1, |
|
"harness|ko_mmlu_high_school_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_physics|5": 1, |
|
"harness|ko_mmlu_sociology|5": 1, |
|
"harness|ko_mmlu_college_medicine|5": 1, |
|
"harness|ko_mmlu_elementary_mathematics|5": 1, |
|
"harness|ko_mmlu_college_biology|5": 1, |
|
"harness|ko_mmlu_college_chemistry|5": 1, |
|
"harness|ko_mmlu_us_foreign_policy|5": 1, |
|
"harness|ko_mmlu_moral_disputes|5": 1, |
|
"harness|ko_mmlu_logical_fallacies|5": 1, |
|
"harness|ko_mmlu_prehistory|5": 1, |
|
"harness|ko_mmlu_college_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_government_and_politics|5": 1, |
|
"harness|ko_mmlu_econometrics|5": 1, |
|
"harness|ko_mmlu_high_school_psychology|5": 1, |
|
"harness|ko_mmlu_formal_logic|5": 1, |
|
"harness|ko_mmlu_nutrition|5": 1, |
|
"harness|ko_mmlu_business_ethics|5": 1, |
|
"harness|ko_mmlu_international_law|5": 1, |
|
"harness|ko_mmlu_astronomy|5": 1, |
|
"harness|ko_mmlu_professional_psychology|5": 1, |
|
"harness|ko_mmlu_professional_accounting|5": 1, |
|
"harness|ko_mmlu_machine_learning|5": 1, |
|
"harness|ko_mmlu_high_school_statistics|5": 1, |
|
"harness|ko_mmlu_moral_scenarios|5": 1, |
|
"harness|ko_mmlu_college_computer_science|5": 1, |
|
"harness|ko_mmlu_high_school_computer_science|5": 1, |
|
"harness|ko_mmlu_professional_medicine|5": 1, |
|
"harness|ko_mmlu_security_studies|5": 1, |
|
"harness|ko_mmlu_high_school_world_history|5": 1, |
|
"harness|ko_mmlu_professional_law|5": 1, |
|
"harness|ko_mmlu_high_school_us_history|5": 1, |
|
"harness|ko_mmlu_high_school_european_history|5": 1, |
|
"harness|ko_truthfulqa_mc|0": 0, |
|
"harness|ko_commongen_v2|2": 1 |
|
}, |
|
"config_general": { |
|
"model_name": "LI-ST/Mistral-7B-ko-v0.5", |
|
"model_sha": "b20a0853eaf043c7271df8b634b0fc5983b70b72", |
|
"model_dtype": "torch.float16", |
|
"lighteval_sha": "", |
|
"num_few_shot_default": 0, |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null |
|
} |
|
} |