{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.33532423208191126, "acc_stderr": 0.013796182947785562, "acc_norm": 0.3967576791808874, "acc_norm_stderr": 0.014296513020180639 }, "harness|ko_hellaswag|10": { "acc": 0.35570603465445133, "acc_stderr": 0.004777483159634026, "acc_norm": 0.44891455885281817, "acc_norm_stderr": 0.004963669199433386 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.631578947368421, "acc_stderr": 0.036996580176568775, "acc_norm": 0.631578947368421, "acc_norm_stderr": 0.036996580176568775 }, "harness|ko_mmlu_management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.045821241601615506, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.045821241601615506 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5606641123882503, "acc_stderr": 0.017747874245683606, "acc_norm": 0.5606641123882503, "acc_norm_stderr": 0.017747874245683606 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715564, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715564 }, "harness|ko_mmlu_virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685515, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685515 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5176848874598071, "acc_stderr": 0.028380322849077138, "acc_norm": 0.5176848874598071, "acc_norm_stderr": 0.028380322849077138 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.47533632286995514, "acc_stderr": 0.033516951676526276, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.033516951676526276 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956914, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956914 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413007, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413007 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.542016806722689, "acc_stderr": 0.03236361111951941, "acc_norm": 0.542016806722689, "acc_norm_stderr": 0.03236361111951941 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.47692307692307695, "acc_stderr": 0.025323990861736118, "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.025323990861736118 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5740740740740741, "acc_stderr": 0.0478034362693679, "acc_norm": 0.5740740740740741, 
"acc_norm_stderr": 0.0478034362693679 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.567741935483871, "acc_stderr": 0.028181739720019403, "acc_norm": 0.567741935483871, "acc_norm_stderr": 0.028181739720019403 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7478632478632479, "acc_stderr": 0.02844796547623102, "acc_norm": 0.7478632478632479, "acc_norm_stderr": 0.02844796547623102 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4679245283018868, "acc_stderr": 0.03070948699255654, "acc_norm": 0.4679245283018868, "acc_norm_stderr": 0.03070948699255654 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5727272727272728, "acc_stderr": 0.047381987035454834, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.047381987035454834 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948496, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948496 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03333333333333334, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03333333333333334 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092055, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092055 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3862433862433862, "acc_stderr": 0.02507598176760168, "acc_norm": 0.3862433862433862, "acc_norm_stderr": 0.02507598176760168 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4652777777777778, "acc_stderr": 0.04171115858181618, "acc_norm": 0.4652777777777778, "acc_norm_stderr": 0.04171115858181618 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4624277456647399, "acc_stderr": 0.026842985519615375, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.026842985519615375 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4601226993865031, "acc_stderr": 0.0391585729143697, "acc_norm": 0.4601226993865031, "acc_norm_stderr": 0.0391585729143697 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5462962962962963, "acc_stderr": 0.0277012284685426, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.0277012284685426 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5906735751295337, "acc_stderr": 0.03548608168860806, "acc_norm": 0.5906735751295337, "acc_norm_stderr": 0.03548608168860806 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.37719298245614036, "acc_stderr": 0.04559522141958216, "acc_norm": 0.37719298245614036, "acc_norm_stderr": 0.04559522141958216 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.563302752293578, "acc_stderr": 0.021264820158714212, "acc_norm": 0.563302752293578, "acc_norm_stderr": 0.021264820158714212 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5326797385620915, "acc_stderr": 0.02856869975222587, "acc_norm": 0.5326797385620915, "acc_norm_stderr": 0.02856869975222587 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292534, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292534 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.040685900502249704, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.040685900502249704 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.43300653594771243, "acc_stderr": 0.020045442473324227, "acc_norm": 0.43300653594771243, "acc_norm_stderr": 0.020045442473324227 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02812163604063989, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02812163604063989 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.39814814814814814, "acc_stderr": 0.03338473403207401, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.03338473403207401 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3897058823529412, "acc_stderr": 0.029624663581159703, "acc_norm": 0.3897058823529412, "acc_norm_stderr": 0.029624663581159703 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5877551020408164, "acc_stderr": 0.0315123604467427, "acc_norm": 0.5877551020408164, "acc_norm_stderr": 0.0315123604467427 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.620253164556962, "acc_stderr": 0.0315918875296585, "acc_norm": 0.620253164556962, "acc_norm_stderr": 0.0315918875296585 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.35267275097783574, "acc_stderr": 0.012203286846053886, "acc_norm": 0.35267275097783574, "acc_norm_stderr": 0.012203286846053886 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5931372549019608, "acc_stderr": 0.03447891136353382, "acc_norm": 0.5931372549019608, "acc_norm_stderr": 0.03447891136353382 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5878787878787879, "acc_stderr": 0.03843566993588718, "acc_norm": 0.5878787878787879, "acc_norm_stderr": 0.03843566993588718 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156463, "mc2": 0.39691229982895626, "mc2_stderr": 0.015650350837213285 }, "harness|ko_commongen_v2|2": { "acc": 0.4344746162927981, "acc_stderr": 0.01704209862082494, "acc_norm": 0.58913813459268, "acc_norm_stderr": 0.016914972767841055 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "4yo1/llama3-pre1-pre2-lora3-mergkit-base2", "model_sha": "f17d5d8e03feb2d7634e99892fcde4aaefe49af4", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }