leaderboard-test-results/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2721843003412969,
            "acc_stderr": 0.013006600406423709,
            "acc_norm": 0.32849829351535836,
            "acc_norm_stderr": 0.013724978465537377
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.345947022505477,
            "acc_stderr": 0.00474703876817253,
            "acc_norm": 0.42362079267078273,
            "acc_norm_stderr": 0.004931219148182244
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4269005847953216,
            "acc_stderr": 0.03793620616529916,
            "acc_norm": 0.4269005847953216,
            "acc_norm_stderr": 0.03793620616529916
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.3592233009708738,
            "acc_stderr": 0.047504583990416925,
            "acc_norm": 0.3592233009708738,
            "acc_norm_stderr": 0.047504583990416925
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4163473818646232,
            "acc_stderr": 0.017627948030430298,
            "acc_norm": 0.4163473818646232,
            "acc_norm_stderr": 0.017627948030430298
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.03885004245800254,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.03885004245800254
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.31063829787234043,
            "acc_stderr": 0.03025123757921317,
            "acc_norm": 0.31063829787234043,
            "acc_norm_stderr": 0.03025123757921317
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3253012048192771,
            "acc_stderr": 0.03647168523683227,
            "acc_norm": 0.3253012048192771,
            "acc_norm_stderr": 0.03647168523683227
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3633440514469453,
            "acc_stderr": 0.027316847674192717,
            "acc_norm": 0.3633440514469453,
            "acc_norm_stderr": 0.027316847674192717
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3542600896860987,
            "acc_stderr": 0.032100621541349864,
            "acc_norm": 0.3542600896860987,
            "acc_norm_stderr": 0.032100621541349864
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3282442748091603,
            "acc_stderr": 0.041184385658062976,
            "acc_norm": 0.3282442748091603,
            "acc_norm_stderr": 0.041184385658062976
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.033586181457325226,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.033586181457325226
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.32413793103448274,
            "acc_stderr": 0.03900432069185553,
            "acc_norm": 0.32413793103448274,
            "acc_norm_stderr": 0.03900432069185553
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.03873958714149352,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.03873958714149352
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3403361344537815,
            "acc_stderr": 0.030778057422931673,
            "acc_norm": 0.3403361344537815,
            "acc_norm_stderr": 0.030778057422931673
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.358974358974359,
            "acc_stderr": 0.024321738484602357,
            "acc_norm": 0.358974358974359,
            "acc_norm_stderr": 0.024321738484602357
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.0471282125742677,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.0471282125742677
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03255086769970103,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03255086769970103
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4,
            "acc_stderr": 0.027869320571664632,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.027869320571664632
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5384615384615384,
            "acc_stderr": 0.03265903381186195,
            "acc_norm": 0.5384615384615384,
            "acc_norm_stderr": 0.03265903381186195
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3622641509433962,
            "acc_stderr": 0.0295822451283843,
            "acc_norm": 0.3622641509433962,
            "acc_norm_stderr": 0.0295822451283843
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.37272727272727274,
            "acc_stderr": 0.04631381319425464,
            "acc_norm": 0.37272727272727274,
            "acc_norm_stderr": 0.04631381319425464
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.02840653309060846,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.02840653309060846
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.037579499229433426,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.037579499229433426
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.43781094527363185,
            "acc_stderr": 0.0350808011219984,
            "acc_norm": 0.43781094527363185,
            "acc_norm_stderr": 0.0350808011219984
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.03391750322321659,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.03391750322321659
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.34656084656084657,
            "acc_stderr": 0.024508777521028428,
            "acc_norm": 0.34656084656084657,
            "acc_norm_stderr": 0.024508777521028428
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03745554791462457,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03745554791462457
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.31213872832369943,
            "acc_stderr": 0.02494679222527231,
            "acc_norm": 0.31213872832369943,
            "acc_norm_stderr": 0.02494679222527231
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.34355828220858897,
            "acc_stderr": 0.037311335196738925,
            "acc_norm": 0.34355828220858897,
            "acc_norm_stderr": 0.037311335196738925
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.02712511551316687,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.02712511551316687
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.48704663212435234,
            "acc_stderr": 0.03607228061047749,
            "acc_norm": 0.48704663212435234,
            "acc_norm_stderr": 0.03607228061047749
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.040493392977481404,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.040493392977481404
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3779816513761468,
            "acc_stderr": 0.020789187066728113,
            "acc_norm": 0.3779816513761468,
            "acc_norm_stderr": 0.020789187066728113
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.04190596438871137,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.04190596438871137
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.027530078447110317,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.027530078447110317
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5289256198347108,
            "acc_stderr": 0.04556710331269498,
            "acc_norm": 0.5289256198347108,
            "acc_norm_stderr": 0.04556710331269498
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.0387813988879761,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.0387813988879761
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3202614379084967,
            "acc_stderr": 0.018875682938069443,
            "acc_norm": 0.3202614379084967,
            "acc_norm_stderr": 0.018875682938069443
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3262411347517731,
            "acc_stderr": 0.02796845304356317,
            "acc_norm": 0.3262411347517731,
            "acc_norm_stderr": 0.02796845304356317
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.23214285714285715,
            "acc_stderr": 0.04007341809755806,
            "acc_norm": 0.23214285714285715,
            "acc_norm_stderr": 0.04007341809755806
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.25462962962962965,
            "acc_stderr": 0.02971127586000533,
            "acc_norm": 0.25462962962962965,
            "acc_norm_stderr": 0.02971127586000533
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2569832402234637,
            "acc_stderr": 0.014614465821966342,
            "acc_norm": 0.2569832402234637,
            "acc_norm_stderr": 0.014614465821966342
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3161764705882353,
            "acc_stderr": 0.02824568739146293,
            "acc_norm": 0.3161764705882353,
            "acc_norm_stderr": 0.02824568739146293
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.30612244897959184,
            "acc_stderr": 0.029504896454595968,
            "acc_norm": 0.30612244897959184,
            "acc_norm_stderr": 0.029504896454595968
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4388185654008439,
            "acc_stderr": 0.032302649315470375,
            "acc_norm": 0.4388185654008439,
            "acc_norm_stderr": 0.032302649315470375
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31421121251629724,
            "acc_stderr": 0.011855911587048231,
            "acc_norm": 0.31421121251629724,
            "acc_norm_stderr": 0.011855911587048231
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.03364487286088299,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.03364487286088299
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3575757575757576,
            "acc_stderr": 0.037425970438065864,
            "acc_norm": 0.3575757575757576,
            "acc_norm_stderr": 0.037425970438065864
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.47535947414675184,
            "mc2_stderr": 0.015845184891705482
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.22195985832349469,
            "acc_stderr": 0.014287394616821172,
            "acc_norm": 0.2668240850059032,
            "acc_norm_stderr": 0.015206575684565883
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined",
        "model_sha": "5f5dac05ae42c508810fe2dc7d4eef1350c3a1b2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}