{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.24829351535836178,
            "acc_stderr": 0.012624912868089764,
            "acc_norm": 0.2858361774744027,
            "acc_norm_stderr": 0.013203196088537369
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35371439952200756,
            "acc_stderr": 0.004771447244095125,
            "acc_norm": 0.4420434176458873,
            "acc_norm_stderr": 0.004956147046108963
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.28654970760233917,
            "acc_stderr": 0.034678266857038245,
            "acc_norm": 0.28654970760233917,
            "acc_norm_stderr": 0.034678266857038245
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.21359223300970873,
            "acc_stderr": 0.040580420156460344,
            "acc_norm": 0.21359223300970873,
            "acc_norm_stderr": 0.040580420156460344
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2681992337164751,
            "acc_stderr": 0.015842430835269445,
            "acc_norm": 0.2681992337164751,
            "acc_norm_stderr": 0.015842430835269445
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.03502553170678317,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.03502553170678317
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.027678452578212373,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.027678452578212373
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2469879518072289,
            "acc_stderr": 0.03357351982064536,
            "acc_norm": 0.2469879518072289,
            "acc_norm_stderr": 0.03357351982064536
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26366559485530544,
            "acc_stderr": 0.02502553850053234,
            "acc_norm": 0.26366559485530544,
            "acc_norm_stderr": 0.02502553850053234
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.18385650224215247,
            "acc_stderr": 0.025998379092356513,
            "acc_norm": 0.18385650224215247,
            "acc_norm_stderr": 0.025998379092356513
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.21374045801526717,
            "acc_stderr": 0.0359546161177469,
            "acc_norm": 0.21374045801526717,
            "acc_norm_stderr": 0.0359546161177469
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.25757575757575757,
            "acc_stderr": 0.03115626951964684,
            "acc_norm": 0.25757575757575757,
            "acc_norm_stderr": 0.03115626951964684
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.27586206896551724,
            "acc_stderr": 0.03724563619774632,
            "acc_norm": 0.27586206896551724,
            "acc_norm_stderr": 0.03724563619774632
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.10784313725490197,
            "acc_stderr": 0.030864282122060136,
            "acc_norm": 0.10784313725490197,
            "acc_norm_stderr": 0.030864282122060136
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24369747899159663,
            "acc_stderr": 0.02788682807838056,
            "acc_norm": 0.24369747899159663,
            "acc_norm_stderr": 0.02788682807838056
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24102564102564103,
            "acc_stderr": 0.0216855466653332,
            "acc_norm": 0.24102564102564103,
            "acc_norm_stderr": 0.0216855466653332
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2037037037037037,
            "acc_stderr": 0.03893542518824847,
            "acc_norm": 0.2037037037037037,
            "acc_norm_stderr": 0.03893542518824847
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.21182266009852216,
            "acc_stderr": 0.028748983689941072,
            "acc_norm": 0.21182266009852216,
            "acc_norm_stderr": 0.028748983689941072
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2645161290322581,
            "acc_stderr": 0.025091892378859275,
            "acc_norm": 0.2645161290322581,
            "acc_norm_stderr": 0.025091892378859275
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2606837606837607,
            "acc_stderr": 0.028760348956523414,
            "acc_norm": 0.2606837606837607,
            "acc_norm_stderr": 0.028760348956523414
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22641509433962265,
            "acc_stderr": 0.025757559893106727,
            "acc_norm": 0.22641509433962265,
            "acc_norm_stderr": 0.025757559893106727
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.04013964554072775,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.04013964554072775
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.026842057873833713,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.026842057873833713
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.31343283582089554,
            "acc_stderr": 0.03280188205348642,
            "acc_norm": 0.31343283582089554,
            "acc_norm_stderr": 0.03280188205348642
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.23699421965317918,
            "acc_stderr": 0.03242414757483098,
            "acc_norm": 0.23699421965317918,
            "acc_norm_stderr": 0.03242414757483098
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2671957671957672,
            "acc_stderr": 0.022789673145776575,
            "acc_norm": 0.2671957671957672,
            "acc_norm_stderr": 0.022789673145776575
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036622,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036622
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.023618678310069363,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.023618678310069363
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3128834355828221,
            "acc_stderr": 0.03642914578292404,
            "acc_norm": 0.3128834355828221,
            "acc_norm_stderr": 0.03642914578292404
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.025842248700902168,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.025842248700902168
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24352331606217617,
            "acc_stderr": 0.030975436386845436,
            "acc_norm": 0.24352331606217617,
            "acc_norm_stderr": 0.030975436386845436
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.19298245614035087,
            "acc_stderr": 0.037124548537213684,
            "acc_norm": 0.19298245614035087,
            "acc_norm_stderr": 0.037124548537213684
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.21100917431192662,
            "acc_stderr": 0.01749392240411265,
            "acc_norm": 0.21100917431192662,
            "acc_norm_stderr": 0.01749392240411265
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.16666666666666666,
            "acc_stderr": 0.03333333333333338,
            "acc_norm": 0.16666666666666666,
            "acc_norm_stderr": 0.03333333333333338
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.024630048979824765,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.024630048979824765
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.04412015806624503,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.04412015806624503
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.20394736842105263,
            "acc_stderr": 0.032790004063100515,
            "acc_norm": 0.20394736842105263,
            "acc_norm_stderr": 0.032790004063100515
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25326797385620914,
            "acc_stderr": 0.01759348689536683,
            "acc_norm": 0.25326797385620914,
            "acc_norm_stderr": 0.01759348689536683
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2375886524822695,
            "acc_stderr": 0.025389512552729903,
            "acc_norm": 0.2375886524822695,
            "acc_norm_stderr": 0.025389512552729903
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.24107142857142858,
            "acc_stderr": 0.04059867246952689,
            "acc_norm": 0.24107142857142858,
            "acc_norm_stderr": 0.04059867246952689
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2361111111111111,
            "acc_stderr": 0.02896370257079103,
            "acc_norm": 0.2361111111111111,
            "acc_norm_stderr": 0.02896370257079103
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28156424581005585,
            "acc_stderr": 0.015042290171866132,
            "acc_norm": 0.28156424581005585,
            "acc_norm_stderr": 0.015042290171866132
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.34191176470588236,
            "acc_stderr": 0.02881472242225418,
            "acc_norm": 0.34191176470588236,
            "acc_norm_stderr": 0.02881472242225418
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.18775510204081633,
            "acc_stderr": 0.02500025603954621,
            "acc_norm": 0.18775510204081633,
            "acc_norm_stderr": 0.02500025603954621
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.270042194092827,
            "acc_stderr": 0.028900721906293426,
            "acc_norm": 0.270042194092827,
            "acc_norm_stderr": 0.028900721906293426
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24185136897001303,
            "acc_stderr": 0.010936550813827054,
            "acc_norm": 0.24185136897001303,
            "acc_norm_stderr": 0.010936550813827054
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.03019028245350195,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.03019028245350195
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2606060606060606,
            "acc_stderr": 0.03427743175816525,
            "acc_norm": 0.2606060606060606,
            "acc_norm_stderr": 0.03427743175816525
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.22399020807833536,
            "mc1_stderr": 0.014594964329474203,
            "mc2": 0.4106638009419967,
            "mc2_stderr": 0.015724386722290755
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3187721369539551,
            "acc_stderr": 0.01602142705530959,
            "acc_norm": 0.38488783943329397,
            "acc_norm_stderr": 0.01672857970149866
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction",
        "model_sha": "5dd983e0688b676b814f4b9a02810de2d31dafb3",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}