{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.38139931740614336,
"acc_stderr": 0.014194389086685247,
"acc_norm": 0.45051194539249145,
"acc_norm_stderr": 0.014539646098471627
},
"harness|ko_hellaswag|10": {
"acc": 0.41963752240589525,
"acc_stderr": 0.004924910433106359,
"acc_norm": 0.566122286397132,
"acc_norm_stderr": 0.004945956744943813
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5146198830409356,
"acc_stderr": 0.038331852752130254,
"acc_norm": 0.5146198830409356,
"acc_norm_stderr": 0.038331852752130254
},
"harness|ko_mmlu_management|5": {
"acc": 0.4368932038834951,
"acc_stderr": 0.049111471073657764,
"acc_norm": 0.4368932038834951,
"acc_norm_stderr": 0.049111471073657764
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5351213282247765,
"acc_stderr": 0.017835798806290642,
"acc_norm": 0.5351213282247765,
"acc_norm_stderr": 0.017835798806290642
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.5037037037037037,
"acc_stderr": 0.043192236258113324,
"acc_norm": 0.5037037037037037,
"acc_norm_stderr": 0.043192236258113324
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3872340425531915,
"acc_stderr": 0.03184389265339526,
"acc_norm": 0.3872340425531915,
"acc_norm_stderr": 0.03184389265339526
},
"harness|ko_mmlu_virology|5": {
"acc": 0.41566265060240964,
"acc_stderr": 0.03836722176598052,
"acc_norm": 0.41566265060240964,
"acc_norm_stderr": 0.03836722176598052
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4983922829581994,
"acc_stderr": 0.02839794490780661,
"acc_norm": 0.4983922829581994,
"acc_norm_stderr": 0.02839794490780661
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5022421524663677,
"acc_stderr": 0.033557465352232634,
"acc_norm": 0.5022421524663677,
"acc_norm_stderr": 0.033557465352232634
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5114503816793893,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.5114503816793893,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.51010101010101,
"acc_stderr": 0.035616254886737454,
"acc_norm": 0.51010101010101,
"acc_norm_stderr": 0.035616254886737454
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3724137931034483,
"acc_stderr": 0.0402873153294756,
"acc_norm": 0.3724137931034483,
"acc_norm_stderr": 0.0402873153294756
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.18627450980392157,
"acc_stderr": 0.038739587141493524,
"acc_norm": 0.18627450980392157,
"acc_norm_stderr": 0.038739587141493524
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4327731092436975,
"acc_stderr": 0.03218358107742613,
"acc_norm": 0.4327731092436975,
"acc_norm_stderr": 0.03218358107742613
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4,
"acc_stderr": 0.024838811988033158,
"acc_norm": 0.4,
"acc_norm_stderr": 0.024838811988033158
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.55,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.55,
"acc_norm_stderr": 0.049999999999999996
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4088669950738916,
"acc_stderr": 0.034590588158832314,
"acc_norm": 0.4088669950738916,
"acc_norm_stderr": 0.034590588158832314
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.43870967741935485,
"acc_stderr": 0.028229497320317216,
"acc_norm": 0.43870967741935485,
"acc_norm_stderr": 0.028229497320317216
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6153846153846154,
"acc_stderr": 0.03187195347942466,
"acc_norm": 0.6153846153846154,
"acc_norm_stderr": 0.03187195347942466
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.42641509433962266,
"acc_stderr": 0.03043779434298305,
"acc_norm": 0.42641509433962266,
"acc_norm_stderr": 0.03043779434298305
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4636363636363636,
"acc_stderr": 0.047764491623961985,
"acc_norm": 0.4636363636363636,
"acc_norm_stderr": 0.047764491623961985
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.02696242432507382,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.02696242432507382
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.03802039760107903,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.03802039760107903
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6019900497512438,
"acc_stderr": 0.034611994290400135,
"acc_norm": 0.6019900497512438,
"acc_norm_stderr": 0.034611994290400135
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.36416184971098264,
"acc_stderr": 0.03669072477416906,
"acc_norm": 0.36416184971098264,
"acc_norm_stderr": 0.03669072477416906
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2751322751322751,
"acc_stderr": 0.02300008685906864,
"acc_norm": 0.2751322751322751,
"acc_norm_stderr": 0.02300008685906864
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.03981240543717862,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.03981240543717862
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.48554913294797686,
"acc_stderr": 0.02690784985628254,
"acc_norm": 0.48554913294797686,
"acc_norm_stderr": 0.02690784985628254
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.38650306748466257,
"acc_stderr": 0.038258255488486076,
"acc_norm": 0.38650306748466257,
"acc_norm_stderr": 0.038258255488486076
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4537037037037037,
"acc_stderr": 0.027701228468542602,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.027701228468542602
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.43523316062176165,
"acc_stderr": 0.03578038165008586,
"acc_norm": 0.43523316062176165,
"acc_norm_stderr": 0.03578038165008586
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022058,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022058
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5339449541284403,
"acc_stderr": 0.02138786335035399,
"acc_norm": 0.5339449541284403,
"acc_norm_stderr": 0.02138786335035399
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488795,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488795
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.02827549015679143,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.02827549015679143
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6115702479338843,
"acc_stderr": 0.04449270350068382,
"acc_norm": 0.6115702479338843,
"acc_norm_stderr": 0.04449270350068382
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.39473684210526316,
"acc_stderr": 0.039777499346220734,
"acc_norm": 0.39473684210526316,
"acc_norm_stderr": 0.039777499346220734
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3839869281045752,
"acc_stderr": 0.019675808135281508,
"acc_norm": 0.3839869281045752,
"acc_norm_stderr": 0.019675808135281508
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2978723404255319,
"acc_stderr": 0.027281608344469414,
"acc_norm": 0.2978723404255319,
"acc_norm_stderr": 0.027281608344469414
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03894641120044791,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03894641120044791
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2638888888888889,
"acc_stderr": 0.030058202704309846,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.030058202704309846
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.41911764705882354,
"acc_stderr": 0.029972807170464622,
"acc_norm": 0.41911764705882354,
"acc_norm_stderr": 0.029972807170464622
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.47346938775510206,
"acc_stderr": 0.03196412734523272,
"acc_norm": 0.47346938775510206,
"acc_norm_stderr": 0.03196412734523272
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5485232067510548,
"acc_stderr": 0.032393600173974704,
"acc_norm": 0.5485232067510548,
"acc_norm_stderr": 0.032393600173974704
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.29595827900912647,
"acc_stderr": 0.011658518525277054,
"acc_norm": 0.29595827900912647,
"acc_norm_stderr": 0.011658518525277054
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4068627450980392,
"acc_stderr": 0.03447891136353382,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.03447891136353382
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.03898531605579418,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.03898531605579418
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.27050183598531213,
"mc1_stderr": 0.015550778332842885,
"mc2": 0.42557508687226114,
"mc2_stderr": 0.014810504388914819
},
"harness|ko_commongen_v2|2": {
"acc": 0.525383707201889,
"acc_stderr": 0.017168187201429253,
"acc_norm": 0.6092089728453365,
"acc_norm_stderr": 0.016775298465108265
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GAI-LLM/llama-2-koen-13b-mixed-v11_2",
"model_sha": "da7c55c72f9f911022709d710972972beef327a4",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}