leaderboard-test-results/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3575085324232082,
            "acc_stderr": 0.014005494275916576,
            "acc_norm": 0.42150170648464164,
            "acc_norm_stderr": 0.014430197069326028
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38279227245568614,
            "acc_stderr": 0.004850748687859933,
            "acc_norm": 0.4874526986656045,
            "acc_norm_stderr": 0.004988210033832016
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5029239766081871,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.5029239766081871,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4623243933588761,
            "acc_stderr": 0.017829131764287198,
            "acc_norm": 0.4623243933588761,
            "acc_norm_stderr": 0.017829131764287198
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34814814814814815,
            "acc_stderr": 0.041153246103369526,
            "acc_norm": 0.34814814814814815,
            "acc_norm_stderr": 0.041153246103369526
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384741,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384741
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4,
            "acc_stderr": 0.032025630761017346,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.032025630761017346
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.038367221765980515,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.038367221765980515
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.02835563356832818,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.02835563356832818
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4618834080717489,
            "acc_stderr": 0.033460150119732274,
            "acc_norm": 0.4618834080717489,
            "acc_norm_stderr": 0.033460150119732274
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.44274809160305345,
            "acc_stderr": 0.04356447202665069,
            "acc_norm": 0.44274809160305345,
            "acc_norm_stderr": 0.04356447202665069
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5656565656565656,
            "acc_stderr": 0.03531505879359182,
            "acc_norm": 0.5656565656565656,
            "acc_norm_stderr": 0.03531505879359182
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.45517241379310347,
            "acc_stderr": 0.04149886942192117,
            "acc_norm": 0.45517241379310347,
            "acc_norm_stderr": 0.04149886942192117
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.17647058823529413,
            "acc_stderr": 0.03793281185307807,
            "acc_norm": 0.17647058823529413,
            "acc_norm_stderr": 0.03793281185307807
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4282051282051282,
            "acc_stderr": 0.02508830145469484,
            "acc_norm": 0.4282051282051282,
            "acc_norm_stderr": 0.02508830145469484
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.034139638059062345,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.034139638059062345
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.432258064516129,
            "acc_stderr": 0.028181739720019413,
            "acc_norm": 0.432258064516129,
            "acc_norm_stderr": 0.028181739720019413
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6923076923076923,
            "acc_stderr": 0.030236389942173095,
            "acc_norm": 0.6923076923076923,
            "acc_norm_stderr": 0.030236389942173095
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4679245283018868,
            "acc_stderr": 0.030709486992556538,
            "acc_norm": 0.4679245283018868,
            "acc_norm_stderr": 0.030709486992556538
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.027420019350945277,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.027420019350945277
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763744,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763744
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.03461199429040013,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.03461199429040013
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.03703851193099521,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.03703851193099521
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.0242785680243077,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.0242785680243077
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4913294797687861,
            "acc_stderr": 0.026915047355369818,
            "acc_norm": 0.4913294797687861,
            "acc_norm_stderr": 0.026915047355369818
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4601226993865031,
            "acc_stderr": 0.03915857291436971,
            "acc_norm": 0.4601226993865031,
            "acc_norm_stderr": 0.03915857291436971
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.43209876543209874,
            "acc_stderr": 0.02756301097160668,
            "acc_norm": 0.43209876543209874,
            "acc_norm_stderr": 0.02756301097160668
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5181347150259067,
            "acc_stderr": 0.036060650018329185,
            "acc_norm": 0.5181347150259067,
            "acc_norm_stderr": 0.036060650018329185
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.043727482902780085,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.043727482902780085
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.47522935779816516,
            "acc_stderr": 0.02141099975363592,
            "acc_norm": 0.47522935779816516,
            "acc_norm_stderr": 0.02141099975363592
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04285714285714281,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04285714285714281
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.028491993586171573,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.028491993586171573
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.03977749934622074,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.03977749934622074
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3872549019607843,
            "acc_stderr": 0.019706875804085627,
            "acc_norm": 0.3872549019607843,
            "acc_norm_stderr": 0.019706875804085627
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36524822695035464,
            "acc_stderr": 0.028723863853281278,
            "acc_norm": 0.36524822695035464,
            "acc_norm_stderr": 0.028723863853281278
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.39351851851851855,
            "acc_stderr": 0.03331747876370312,
            "acc_norm": 0.39351851851851855,
            "acc_norm_stderr": 0.03331747876370312
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.21564245810055865,
            "acc_stderr": 0.013754835975482355,
            "acc_norm": 0.21564245810055865,
            "acc_norm_stderr": 0.013754835975482355
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.36764705882352944,
            "acc_stderr": 0.029289413409403192,
            "acc_norm": 0.36764705882352944,
            "acc_norm_stderr": 0.029289413409403192
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.43673469387755104,
            "acc_stderr": 0.03175195237583322,
            "acc_norm": 0.43673469387755104,
            "acc_norm_stderr": 0.03175195237583322
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5907172995780591,
            "acc_stderr": 0.03200704183359591,
            "acc_norm": 0.5907172995780591,
            "acc_norm_stderr": 0.03200704183359591
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31747066492829207,
            "acc_stderr": 0.01188889206880931,
            "acc_norm": 0.31747066492829207,
            "acc_norm_stderr": 0.01188889206880931
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5098039215686274,
            "acc_stderr": 0.03508637358630572,
            "acc_norm": 0.5098039215686274,
            "acc_norm_stderr": 0.03508637358630572
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.03895658065271846,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.03895658065271846
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2876376988984088,
            "mc1_stderr": 0.015846315101394823,
            "mc2": 0.47510378175366297,
            "mc2_stderr": 0.015686785961170725
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4297520661157025,
            "acc_stderr": 0.017019847535972205,
            "acc_norm": 0.48642266824085006,
            "acc_norm_stderr": 0.017184015060401448
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1",
        "model_sha": "3cf7eb4c014f181bec2a9b36897771b2710422d1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}