leaderboard-test-results/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35409556313993173,
            "acc_stderr": 0.01397545412275656,
            "acc_norm": 0.40784982935153585,
            "acc_norm_stderr": 0.014361097288449686
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38189603664608646,
            "acc_stderr": 0.0048485832436066904,
            "acc_norm": 0.49661422027484564,
            "acc_norm_stderr": 0.004989667009372637
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49707602339181284,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.49707602339181284,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5825242718446602,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.5825242718446602,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4648786717752235,
            "acc_stderr": 0.01783579880629064,
            "acc_norm": 0.4648786717752235,
            "acc_norm_stderr": 0.01783579880629064
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354543,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354543
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3829787234042553,
            "acc_stderr": 0.03177821250236922,
            "acc_norm": 0.3829787234042553,
            "acc_norm_stderr": 0.03177821250236922
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.038515976837185335,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.038515976837185335
        },
"harness|ko_mmlu_philosophy|5": { | |
"acc": 0.45016077170418006, | |
"acc_stderr": 0.02825666072336018, | |
"acc_norm": 0.45016077170418006, | |
"acc_norm_stderr": 0.02825666072336018 | |
}, | |
"harness|ko_mmlu_human_aging|5": { | |
"acc": 0.4304932735426009, | |
"acc_stderr": 0.033231973029429394, | |
"acc_norm": 0.4304932735426009, | |
"acc_norm_stderr": 0.033231973029429394 | |
}, | |
"harness|ko_mmlu_human_sexuality|5": { | |
"acc": 0.4351145038167939, | |
"acc_stderr": 0.04348208051644858, | |
"acc_norm": 0.4351145038167939, | |
"acc_norm_stderr": 0.04348208051644858 | |
}, | |
"harness|ko_mmlu_medical_genetics|5": { | |
"acc": 0.43, | |
"acc_stderr": 0.049756985195624284, | |
"acc_norm": 0.43, | |
"acc_norm_stderr": 0.049756985195624284 | |
}, | |
"harness|ko_mmlu_high_school_geography|5": { | |
"acc": 0.494949494949495, | |
"acc_stderr": 0.035621707606254015, | |
"acc_norm": 0.494949494949495, | |
"acc_norm_stderr": 0.035621707606254015 | |
}, | |
"harness|ko_mmlu_electrical_engineering|5": { | |
"acc": 0.41379310344827586, | |
"acc_stderr": 0.04104269211806232, | |
"acc_norm": 0.41379310344827586, | |
"acc_norm_stderr": 0.04104269211806232 | |
}, | |
"harness|ko_mmlu_college_physics|5": { | |
"acc": 0.16666666666666666, | |
"acc_stderr": 0.03708284662416546, | |
"acc_norm": 0.16666666666666666, | |
"acc_norm_stderr": 0.03708284662416546 | |
}, | |
"harness|ko_mmlu_high_school_microeconomics|5": { | |
"acc": 0.5378151260504201, | |
"acc_stderr": 0.0323854694875898, | |
"acc_norm": 0.5378151260504201, | |
"acc_norm_stderr": 0.0323854694875898 | |
}, | |
"harness|ko_mmlu_high_school_macroeconomics|5": { | |
"acc": 0.4641025641025641, | |
"acc_stderr": 0.025285585990017838, | |
"acc_norm": 0.4641025641025641, | |
"acc_norm_stderr": 0.025285585990017838 | |
}, | |
"harness|ko_mmlu_computer_security|5": { | |
"acc": 0.6, | |
"acc_stderr": 0.049236596391733084, | |
"acc_norm": 0.6, | |
"acc_norm_stderr": 0.049236596391733084 | |
}, | |
"harness|ko_mmlu_global_facts|5": { | |
"acc": 0.33, | |
"acc_stderr": 0.04725815626252605, | |
"acc_norm": 0.33, | |
"acc_norm_stderr": 0.04725815626252605 | |
}, | |
"harness|ko_mmlu_jurisprudence|5": { | |
"acc": 0.5185185185185185, | |
"acc_stderr": 0.04830366024635331, | |
"acc_norm": 0.5185185185185185, | |
"acc_norm_stderr": 0.04830366024635331 | |
}, | |
"harness|ko_mmlu_high_school_chemistry|5": { | |
"acc": 0.3793103448275862, | |
"acc_stderr": 0.034139638059062345, | |
"acc_norm": 0.3793103448275862, | |
"acc_norm_stderr": 0.034139638059062345 | |
}, | |
"harness|ko_mmlu_high_school_biology|5": { | |
"acc": 0.47096774193548385, | |
"acc_stderr": 0.028396016402761008, | |
"acc_norm": 0.47096774193548385, | |
"acc_norm_stderr": 0.028396016402761008 | |
}, | |
"harness|ko_mmlu_marketing|5": { | |
"acc": 0.688034188034188, | |
"acc_stderr": 0.03035152732334494, | |
"acc_norm": 0.688034188034188, | |
"acc_norm_stderr": 0.03035152732334494 | |
}, | |
"harness|ko_mmlu_clinical_knowledge|5": { | |
"acc": 0.4679245283018868, | |
"acc_stderr": 0.03070948699255654, | |
"acc_norm": 0.4679245283018868, | |
"acc_norm_stderr": 0.03070948699255654 | |
}, | |
"harness|ko_mmlu_public_relations|5": { | |
"acc": 0.5363636363636364, | |
"acc_stderr": 0.04776449162396197, | |
"acc_norm": 0.5363636363636364, | |
"acc_norm_stderr": 0.04776449162396197 | |
}, | |
"harness|ko_mmlu_high_school_mathematics|5": { | |
"acc": 0.29259259259259257, | |
"acc_stderr": 0.02773896963217609, | |
"acc_norm": 0.29259259259259257, | |
"acc_norm_stderr": 0.02773896963217609 | |
}, | |
"harness|ko_mmlu_high_school_physics|5": { | |
"acc": 0.304635761589404, | |
"acc_stderr": 0.03757949922943343, | |
"acc_norm": 0.304635761589404, | |
"acc_norm_stderr": 0.03757949922943343 | |
}, | |
"harness|ko_mmlu_sociology|5": { | |
"acc": 0.6169154228855721, | |
"acc_stderr": 0.034375193373382504, | |
"acc_norm": 0.6169154228855721, | |
"acc_norm_stderr": 0.034375193373382504 | |
}, | |
"harness|ko_mmlu_college_medicine|5": { | |
"acc": 0.34104046242774566, | |
"acc_stderr": 0.03614665424180826, | |
"acc_norm": 0.34104046242774566, | |
"acc_norm_stderr": 0.03614665424180826 | |
}, | |
"harness|ko_mmlu_elementary_mathematics|5": { | |
"acc": 0.36507936507936506, | |
"acc_stderr": 0.024796060602699954, | |
"acc_norm": 0.36507936507936506, | |
"acc_norm_stderr": 0.024796060602699954 | |
}, | |
"harness|ko_mmlu_college_biology|5": { | |
"acc": 0.2847222222222222, | |
"acc_stderr": 0.03773809990686934, | |
"acc_norm": 0.2847222222222222, | |
"acc_norm_stderr": 0.03773809990686934 | |
}, | |
"harness|ko_mmlu_college_chemistry|5": { | |
"acc": 0.36, | |
"acc_stderr": 0.04824181513244218, | |
"acc_norm": 0.36, | |
"acc_norm_stderr": 0.04824181513244218 | |
}, | |
"harness|ko_mmlu_us_foreign_policy|5": { | |
"acc": 0.58, | |
"acc_stderr": 0.04960449637488584, | |
"acc_norm": 0.58, | |
"acc_norm_stderr": 0.04960449637488584 | |
}, | |
"harness|ko_mmlu_moral_disputes|5": { | |
"acc": 0.4797687861271676, | |
"acc_stderr": 0.026897049996382875, | |
"acc_norm": 0.4797687861271676, | |
"acc_norm_stderr": 0.026897049996382875 | |
}, | |
"harness|ko_mmlu_logical_fallacies|5": { | |
"acc": 0.5030674846625767, | |
"acc_stderr": 0.03928297078179663, | |
"acc_norm": 0.5030674846625767, | |
"acc_norm_stderr": 0.03928297078179663 | |
}, | |
"harness|ko_mmlu_prehistory|5": { | |
"acc": 0.4567901234567901, | |
"acc_stderr": 0.027716661650194038, | |
"acc_norm": 0.4567901234567901, | |
"acc_norm_stderr": 0.027716661650194038 | |
}, | |
"harness|ko_mmlu_college_mathematics|5": { | |
"acc": 0.36, | |
"acc_stderr": 0.04824181513244218, | |
"acc_norm": 0.36, | |
"acc_norm_stderr": 0.04824181513244218 | |
}, | |
"harness|ko_mmlu_high_school_government_and_politics|5": { | |
"acc": 0.5077720207253886, | |
"acc_stderr": 0.03608003225569654, | |
"acc_norm": 0.5077720207253886, | |
"acc_norm_stderr": 0.03608003225569654 | |
}, | |
"harness|ko_mmlu_econometrics|5": { | |
"acc": 0.2807017543859649, | |
"acc_stderr": 0.04227054451232199, | |
"acc_norm": 0.2807017543859649, | |
"acc_norm_stderr": 0.04227054451232199 | |
}, | |
"harness|ko_mmlu_high_school_psychology|5": { | |
"acc": 0.4935779816513762, | |
"acc_stderr": 0.021435554820013077, | |
"acc_norm": 0.4935779816513762, | |
"acc_norm_stderr": 0.021435554820013077 | |
}, | |
"harness|ko_mmlu_formal_logic|5": { | |
"acc": 0.3412698412698413, | |
"acc_stderr": 0.04240799327574924, | |
"acc_norm": 0.3412698412698413, | |
"acc_norm_stderr": 0.04240799327574924 | |
}, | |
"harness|ko_mmlu_nutrition|5": { | |
"acc": 0.5392156862745098, | |
"acc_stderr": 0.028541722692618874, | |
"acc_norm": 0.5392156862745098, | |
"acc_norm_stderr": 0.028541722692618874 | |
}, | |
"harness|ko_mmlu_business_ethics|5": { | |
"acc": 0.47, | |
"acc_stderr": 0.050161355804659205, | |
"acc_norm": 0.47, | |
"acc_norm_stderr": 0.050161355804659205 | |
}, | |
"harness|ko_mmlu_international_law|5": { | |
"acc": 0.5867768595041323, | |
"acc_stderr": 0.04495087843548408, | |
"acc_norm": 0.5867768595041323, | |
"acc_norm_stderr": 0.04495087843548408 | |
}, | |
"harness|ko_mmlu_astronomy|5": { | |
"acc": 0.4407894736842105, | |
"acc_stderr": 0.04040311062490435, | |
"acc_norm": 0.4407894736842105, | |
"acc_norm_stderr": 0.04040311062490435 | |
}, | |
"harness|ko_mmlu_professional_psychology|5": { | |
"acc": 0.3660130718954248, | |
"acc_stderr": 0.01948802574552966, | |
"acc_norm": 0.3660130718954248, | |
"acc_norm_stderr": 0.01948802574552966 | |
}, | |
"harness|ko_mmlu_professional_accounting|5": { | |
"acc": 0.36524822695035464, | |
"acc_stderr": 0.028723863853281278, | |
"acc_norm": 0.36524822695035464, | |
"acc_norm_stderr": 0.028723863853281278 | |
}, | |
"harness|ko_mmlu_machine_learning|5": { | |
"acc": 0.4375, | |
"acc_stderr": 0.04708567521880525, | |
"acc_norm": 0.4375, | |
"acc_norm_stderr": 0.04708567521880525 | |
}, | |
"harness|ko_mmlu_high_school_statistics|5": { | |
"acc": 0.4212962962962963, | |
"acc_stderr": 0.033674621388960775, | |
"acc_norm": 0.4212962962962963, | |
"acc_norm_stderr": 0.033674621388960775 | |
}, | |
"harness|ko_mmlu_moral_scenarios|5": { | |
"acc": 0.23128491620111732, | |
"acc_stderr": 0.0141022236231526, | |
"acc_norm": 0.23128491620111732, | |
"acc_norm_stderr": 0.0141022236231526 | |
}, | |
"harness|ko_mmlu_college_computer_science|5": { | |
"acc": 0.43, | |
"acc_stderr": 0.049756985195624284, | |
"acc_norm": 0.43, | |
"acc_norm_stderr": 0.049756985195624284 | |
}, | |
"harness|ko_mmlu_high_school_computer_science|5": { | |
"acc": 0.61, | |
"acc_stderr": 0.04902071300001974, | |
"acc_norm": 0.61, | |
"acc_norm_stderr": 0.04902071300001974 | |
}, | |
"harness|ko_mmlu_professional_medicine|5": { | |
"acc": 0.375, | |
"acc_stderr": 0.029408372932278746, | |
"acc_norm": 0.375, | |
"acc_norm_stderr": 0.029408372932278746 | |
}, | |
"harness|ko_mmlu_security_studies|5": { | |
"acc": 0.5102040816326531, | |
"acc_stderr": 0.03200255347893783, | |
"acc_norm": 0.5102040816326531, | |
"acc_norm_stderr": 0.03200255347893783 | |
}, | |
"harness|ko_mmlu_high_school_world_history|5": { | |
"acc": 0.5907172995780591, | |
"acc_stderr": 0.03200704183359591, | |
"acc_norm": 0.5907172995780591, | |
"acc_norm_stderr": 0.03200704183359591 | |
}, | |
"harness|ko_mmlu_professional_law|5": { | |
"acc": 0.30964797913950454, | |
"acc_stderr": 0.011808598262503318, | |
"acc_norm": 0.30964797913950454, | |
"acc_norm_stderr": 0.011808598262503318 | |
}, | |
"harness|ko_mmlu_high_school_us_history|5": { | |
"acc": 0.5049019607843137, | |
"acc_stderr": 0.03509143375606786, | |
"acc_norm": 0.5049019607843137, | |
"acc_norm_stderr": 0.03509143375606786 | |
}, | |
"harness|ko_mmlu_high_school_european_history|5": { | |
"acc": 0.47878787878787876, | |
"acc_stderr": 0.03900828913737302, | |
"acc_norm": 0.47878787878787876, | |
"acc_norm_stderr": 0.03900828913737302 | |
}, | |
"harness|ko_truthfulqa_mc|0": { | |
"mc1": 0.3268053855569155, | |
"mc1_stderr": 0.016419874731135035, | |
"mc2": 0.4937623805683608, | |
"mc2_stderr": 0.015810468549274707 | |
}, | |
"harness|ko_commongen_v2|2": { | |
"acc": 0.4344746162927981, | |
"acc_stderr": 0.017042098620824928, | |
"acc_norm": 0.4793388429752066, | |
"acc_norm_stderr": 0.017175671279836446 | |
} | |
}, | |
"versions": { | |
"all": 0, | |
"harness|ko_arc_challenge|25": 0, | |
"harness|ko_hellaswag|10": 0, | |
"harness|ko_mmlu_world_religions|5": 1, | |
"harness|ko_mmlu_management|5": 1, | |
"harness|ko_mmlu_miscellaneous|5": 1, | |
"harness|ko_mmlu_anatomy|5": 1, | |
"harness|ko_mmlu_abstract_algebra|5": 1, | |
"harness|ko_mmlu_conceptual_physics|5": 1, | |
"harness|ko_mmlu_virology|5": 1, | |
"harness|ko_mmlu_philosophy|5": 1, | |
"harness|ko_mmlu_human_aging|5": 1, | |
"harness|ko_mmlu_human_sexuality|5": 1, | |
"harness|ko_mmlu_medical_genetics|5": 1, | |
"harness|ko_mmlu_high_school_geography|5": 1, | |
"harness|ko_mmlu_electrical_engineering|5": 1, | |
"harness|ko_mmlu_college_physics|5": 1, | |
"harness|ko_mmlu_high_school_microeconomics|5": 1, | |
"harness|ko_mmlu_high_school_macroeconomics|5": 1, | |
"harness|ko_mmlu_computer_security|5": 1, | |
"harness|ko_mmlu_global_facts|5": 1, | |
"harness|ko_mmlu_jurisprudence|5": 1, | |
"harness|ko_mmlu_high_school_chemistry|5": 1, | |
"harness|ko_mmlu_high_school_biology|5": 1, | |
"harness|ko_mmlu_marketing|5": 1, | |
"harness|ko_mmlu_clinical_knowledge|5": 1, | |
"harness|ko_mmlu_public_relations|5": 1, | |
"harness|ko_mmlu_high_school_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_physics|5": 1, | |
"harness|ko_mmlu_sociology|5": 1, | |
"harness|ko_mmlu_college_medicine|5": 1, | |
"harness|ko_mmlu_elementary_mathematics|5": 1, | |
"harness|ko_mmlu_college_biology|5": 1, | |
"harness|ko_mmlu_college_chemistry|5": 1, | |
"harness|ko_mmlu_us_foreign_policy|5": 1, | |
"harness|ko_mmlu_moral_disputes|5": 1, | |
"harness|ko_mmlu_logical_fallacies|5": 1, | |
"harness|ko_mmlu_prehistory|5": 1, | |
"harness|ko_mmlu_college_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_government_and_politics|5": 1, | |
"harness|ko_mmlu_econometrics|5": 1, | |
"harness|ko_mmlu_high_school_psychology|5": 1, | |
"harness|ko_mmlu_formal_logic|5": 1, | |
"harness|ko_mmlu_nutrition|5": 1, | |
"harness|ko_mmlu_business_ethics|5": 1, | |
"harness|ko_mmlu_international_law|5": 1, | |
"harness|ko_mmlu_astronomy|5": 1, | |
"harness|ko_mmlu_professional_psychology|5": 1, | |
"harness|ko_mmlu_professional_accounting|5": 1, | |
"harness|ko_mmlu_machine_learning|5": 1, | |
"harness|ko_mmlu_high_school_statistics|5": 1, | |
"harness|ko_mmlu_moral_scenarios|5": 1, | |
"harness|ko_mmlu_college_computer_science|5": 1, | |
"harness|ko_mmlu_high_school_computer_science|5": 1, | |
"harness|ko_mmlu_professional_medicine|5": 1, | |
"harness|ko_mmlu_security_studies|5": 1, | |
"harness|ko_mmlu_high_school_world_history|5": 1, | |
"harness|ko_mmlu_professional_law|5": 1, | |
"harness|ko_mmlu_high_school_us_history|5": 1, | |
"harness|ko_mmlu_high_school_european_history|5": 1, | |
"harness|ko_truthfulqa_mc|0": 0, | |
"harness|ko_commongen_v2|2": 1 | |
}, | |
"config_general": { | |
"model_name": "MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1", | |
"model_sha": "4790deb15d0c30a0a8728d8f8419e1694c21eb1a", | |
"model_dtype": "torch.float16", | |
"lighteval_sha": "", | |
"num_few_shot_default": 0, | |
"num_fewshot_seeds": 1, | |
"override_batch_size": 1, | |
"max_samples": null | |
} | |
} |