{ "results": { "mmlu": { "acc,none": 0.45898020225039166, "acc_stderr,none": 0.11262778552466646, "alias": "mmlu" }, "mmlu_humanities": { "alias": " - humanities", "acc,none": 0.43846971307120086, "acc_stderr,none": 0.12139289154802127 }, "mmlu_formal_logic": { "alias": " - formal_logic", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.04216370213557835 }, "mmlu_high_school_european_history": { "alias": " - high_school_european_history", "acc,none": 0.5636363636363636, "acc_stderr,none": 0.03872592983524753 }, "mmlu_high_school_us_history": { "alias": " - high_school_us_history", "acc,none": 0.6225490196078431, "acc_stderr,none": 0.03402272044340705 }, "mmlu_high_school_world_history": { "alias": " - high_school_world_history", "acc,none": 0.6329113924050633, "acc_stderr,none": 0.031376240725616185 }, "mmlu_international_law": { "alias": " - international_law", "acc,none": 0.6528925619834711, "acc_stderr,none": 0.043457245702925335 }, "mmlu_jurisprudence": { "alias": " - jurisprudence", "acc,none": 0.5555555555555556, "acc_stderr,none": 0.04803752235190193 }, "mmlu_logical_fallacies": { "alias": " - logical_fallacies", "acc,none": 0.48466257668711654, "acc_stderr,none": 0.039265223787088424 }, "mmlu_moral_disputes": { "alias": " - moral_disputes", "acc,none": 0.5520231213872833, "acc_stderr,none": 0.026772990653361837 }, "mmlu_moral_scenarios": { "alias": " - moral_scenarios", "acc,none": 0.24692737430167597, "acc_stderr,none": 0.01442229220480886 }, "mmlu_philosophy": { "alias": " - philosophy", "acc,none": 0.5305466237942122, "acc_stderr,none": 0.028345045864840622 }, "mmlu_prehistory": { "alias": " - prehistory", "acc,none": 0.5771604938271605, "acc_stderr,none": 0.027487472980871595 }, "mmlu_professional_law": { "alias": " - professional_law", "acc,none": 0.36571056062581486, "acc_stderr,none": 0.012301028188840568 }, "mmlu_world_religions": { "alias": " - world_religions", "acc,none": 0.631578947368421, "acc_stderr,none": 0.03699658017656878 }, "mmlu_other": { "alias": " - other", "acc,none": 0.5236562600579338, "acc_stderr,none": 0.09481351529303671 }, "mmlu_business_ethics": { "alias": " - business_ethics", "acc,none": 0.53, "acc_stderr,none": 0.05016135580465919 }, "mmlu_clinical_knowledge": { "alias": " - clinical_knowledge", "acc,none": 0.4867924528301887, "acc_stderr,none": 0.030762134874500482 }, "mmlu_college_medicine": { "alias": " - college_medicine", "acc,none": 0.4161849710982659, "acc_stderr,none": 0.03758517775404947 }, "mmlu_global_facts": { "alias": " - global_facts", "acc,none": 0.3, "acc_stderr,none": 0.046056618647183814 }, "mmlu_human_aging": { "alias": " - human_aging", "acc,none": 0.5426008968609866, "acc_stderr,none": 0.03343577705583065 }, "mmlu_management": { "alias": " - management", "acc,none": 0.6990291262135923, "acc_stderr,none": 0.04541609446503948 }, "mmlu_marketing": { "alias": " - marketing", "acc,none": 0.6794871794871795, "acc_stderr,none": 0.030572811310299604 }, "mmlu_medical_genetics": { "alias": " - medical_genetics", "acc,none": 0.52, "acc_stderr,none": 0.05021167315686779 }, "mmlu_miscellaneous": { "alias": " - miscellaneous", "acc,none": 0.6296296296296297, "acc_stderr,none": 0.01726860756000577 }, "mmlu_nutrition": { "alias": " - nutrition", "acc,none": 0.5359477124183006, "acc_stderr,none": 0.02855582751652878 }, "mmlu_professional_accounting": { "alias": " - professional_accounting", "acc,none": 0.3723404255319149, "acc_stderr,none": 0.028838921471251458 }, "mmlu_professional_medicine": { "alias": " - professional_medicine", 
"acc,none": 0.39705882352941174, "acc_stderr,none": 0.029722152099280065 }, "mmlu_virology": { "alias": " - virology", "acc,none": 0.41566265060240964, "acc_stderr,none": 0.038367221765980536 }, "mmlu_social_sciences": { "alias": " - social_sciences", "acc,none": 0.5037374065648359, "acc_stderr,none": 0.09994296884775924 }, "mmlu_econometrics": { "alias": " - econometrics", "acc,none": 0.24561403508771928, "acc_stderr,none": 0.04049339297748141 }, "mmlu_high_school_geography": { "alias": " - high_school_geography", "acc,none": 0.5404040404040404, "acc_stderr,none": 0.035507024651313425 }, "mmlu_high_school_government_and_politics": { "alias": " - high_school_government_and_politics", "acc,none": 0.616580310880829, "acc_stderr,none": 0.03508984236295341 }, "mmlu_high_school_macroeconomics": { "alias": " - high_school_macroeconomics", "acc,none": 0.36153846153846153, "acc_stderr,none": 0.024359581465396987 }, "mmlu_high_school_microeconomics": { "alias": " - high_school_microeconomics", "acc,none": 0.41596638655462187, "acc_stderr,none": 0.03201650100739615 }, "mmlu_high_school_psychology": { "alias": " - high_school_psychology", "acc,none": 0.581651376146789, "acc_stderr,none": 0.02114954859644388 }, "mmlu_human_sexuality": { "alias": " - human_sexuality", "acc,none": 0.5572519083969466, "acc_stderr,none": 0.04356447202665069 }, "mmlu_professional_psychology": { "alias": " - professional_psychology", "acc,none": 0.43300653594771243, "acc_stderr,none": 0.02004544247332422 }, "mmlu_public_relations": { "alias": " - public_relations", "acc,none": 0.5818181818181818, "acc_stderr,none": 0.04724577405731572 }, "mmlu_security_studies": { "alias": " - security_studies", "acc,none": 0.5346938775510204, "acc_stderr,none": 0.03193207024425314 }, "mmlu_sociology": { "alias": " - sociology", "acc,none": 0.6417910447761194, "acc_stderr,none": 0.033903930422688126 }, "mmlu_us_foreign_policy": { "alias": " - us_foreign_policy", "acc,none": 0.77, "acc_stderr,none": 0.042295258468165065 }, "mmlu_stem": { "alias": " - stem", "acc,none": 0.3821757056771329, "acc_stderr,none": 0.0970313380606249 }, "mmlu_abstract_algebra": { "alias": " - abstract_algebra", "acc,none": 0.31, "acc_stderr,none": 0.04648231987117316 }, "mmlu_anatomy": { "alias": " - anatomy", "acc,none": 0.4666666666666667, "acc_stderr,none": 0.043097329010363554 }, "mmlu_astronomy": { "alias": " - astronomy", "acc,none": 0.48026315789473684, "acc_stderr,none": 0.04065771002562605 }, "mmlu_college_biology": { "alias": " - college_biology", "acc,none": 0.4375, "acc_stderr,none": 0.04148415739394154 }, "mmlu_college_chemistry": { "alias": " - college_chemistry", "acc,none": 0.3, "acc_stderr,none": 0.046056618647183814 }, "mmlu_college_computer_science": { "alias": " - college_computer_science", "acc,none": 0.43, "acc_stderr,none": 0.04975698519562428 }, "mmlu_college_mathematics": { "alias": " - college_mathematics", "acc,none": 0.29, "acc_stderr,none": 0.04560480215720684 }, "mmlu_college_physics": { "alias": " - college_physics", "acc,none": 0.2647058823529412, "acc_stderr,none": 0.04389869956808779 }, "mmlu_computer_security": { "alias": " - computer_security", "acc,none": 0.61, "acc_stderr,none": 0.04902071300001975 }, "mmlu_conceptual_physics": { "alias": " - conceptual_physics", "acc,none": 0.40425531914893614, "acc_stderr,none": 0.032081157507886836 }, "mmlu_electrical_engineering": { "alias": " - electrical_engineering", "acc,none": 0.5172413793103449, "acc_stderr,none": 0.04164188720169375 }, "mmlu_elementary_mathematics": { "alias": " - 
elementary_mathematics", "acc,none": 0.3439153439153439, "acc_stderr,none": 0.024464426625596433 }, "mmlu_high_school_biology": { "alias": " - high_school_biology", "acc,none": 0.5129032258064516, "acc_stderr,none": 0.02843453315268186 }, "mmlu_high_school_chemistry": { "alias": " - high_school_chemistry", "acc,none": 0.33497536945812806, "acc_stderr,none": 0.0332085274234831 }, "mmlu_high_school_computer_science": { "alias": " - high_school_computer_science", "acc,none": 0.51, "acc_stderr,none": 0.05024183937956914 }, "mmlu_high_school_mathematics": { "alias": " - high_school_mathematics", "acc,none": 0.24074074074074073, "acc_stderr,none": 0.026067159222275794 }, "mmlu_high_school_physics": { "alias": " - high_school_physics", "acc,none": 0.31788079470198677, "acc_stderr,none": 0.03802039760107903 }, "mmlu_high_school_statistics": { "alias": " - high_school_statistics", "acc,none": 0.2638888888888889, "acc_stderr,none": 0.03005820270430985 }, "mmlu_machine_learning": { "alias": " - machine_learning", "acc,none": 0.33035714285714285, "acc_stderr,none": 0.04464285714285714 } }, "groups": { "mmlu": { "acc,none": 0.45898020225039166, "acc_stderr,none": 0.11262778552466646, "alias": "mmlu" }, "mmlu_humanities": { "alias": " - humanities", "acc,none": 0.43846971307120086, "acc_stderr,none": 0.12139289154802127 }, "mmlu_other": { "alias": " - other", "acc,none": 0.5236562600579338, "acc_stderr,none": 0.09481351529303671 }, "mmlu_social_sciences": { "alias": " - social_sciences", "acc,none": 0.5037374065648359, "acc_stderr,none": 0.09994296884775924 }, "mmlu_stem": { "alias": " - stem", "acc,none": 0.3821757056771329, "acc_stderr,none": 0.0970313380606249 } }, "configs": { "mmlu_abstract_algebra": { "task": "mmlu_abstract_algebra", "task_alias": "abstract_algebra", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "abstract_algebra", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_anatomy": { "task": "mmlu_anatomy", "task_alias": "anatomy", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "anatomy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about anatomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_astronomy": { "task": "mmlu_astronomy", "task_alias": "astronomy", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "astronomy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about astronomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_business_ethics": { "task": "mmlu_business_ethics", "task_alias": "business_ethics", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "business_ethics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about business ethics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_clinical_knowledge": { "task": "mmlu_clinical_knowledge", "task_alias": "clinical_knowledge", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "clinical_knowledge", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_biology": { "task": "mmlu_college_biology", "task_alias": "college_biology", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_biology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_chemistry": { "task": "mmlu_college_chemistry", "task_alias": "college_chemistry", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_chemistry", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_computer_science": { "task": "mmlu_college_computer_science", "task_alias": "college_computer_science", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_computer_science", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_mathematics": { "task": "mmlu_college_mathematics", "task_alias": "college_mathematics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_medicine": { "task": "mmlu_college_medicine", "task_alias": "college_medicine", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_medicine", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_college_physics": { "task": "mmlu_college_physics", "task_alias": "college_physics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "college_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_computer_security": { "task": "mmlu_computer_security", "task_alias": "computer_security", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "computer_security", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about computer security.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_conceptual_physics": { "task": "mmlu_conceptual_physics", "task_alias": "conceptual_physics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "conceptual_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_econometrics": { "task": "mmlu_econometrics", "task_alias": "econometrics", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "econometrics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about econometrics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_electrical_engineering": { "task": "mmlu_electrical_engineering", "task_alias": "electrical_engineering", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "electrical_engineering", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_elementary_mathematics": { "task": "mmlu_elementary_mathematics", "task_alias": "elementary_mathematics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "elementary_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_formal_logic": { "task": "mmlu_formal_logic", "task_alias": "formal_logic", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "formal_logic", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about formal logic.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_global_facts": { "task": "mmlu_global_facts", "task_alias": "global_facts", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "global_facts", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about global facts.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_biology": { "task": "mmlu_high_school_biology", "task_alias": "high_school_biology", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_biology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_chemistry": { "task": "mmlu_high_school_chemistry", "task_alias": "high_school_chemistry", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_chemistry", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_computer_science": { "task": "mmlu_high_school_computer_science", "task_alias": "high_school_computer_science", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_computer_science", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_european_history": { "task": "mmlu_high_school_european_history", "task_alias": "high_school_european_history", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_european_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. 
{{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school european history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_geography": { "task": "mmlu_high_school_geography", "task_alias": "high_school_geography", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_geography", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school geography.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_government_and_politics": { "task": "mmlu_high_school_government_and_politics", "task_alias": "high_school_government_and_politics", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_government_and_politics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_macroeconomics": { "task": "mmlu_high_school_macroeconomics", "task_alias": "high_school_macroeconomics", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_macroeconomics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_mathematics": { "task": "mmlu_high_school_mathematics", "task_alias": "high_school_mathematics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_microeconomics": { "task": "mmlu_high_school_microeconomics", "task_alias": "high_school_microeconomics", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_microeconomics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_physics": { "task": "mmlu_high_school_physics", "task_alias": "high_school_physics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_psychology": { "task": "mmlu_high_school_psychology", "task_alias": "high_school_psychology", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_psychology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. 
{{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_statistics": { "task": "mmlu_high_school_statistics", "task_alias": "high_school_statistics", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_statistics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_us_history": { "task": "mmlu_high_school_us_history", "task_alias": "high_school_us_history", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_us_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school us history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_high_school_world_history": { "task": "mmlu_high_school_world_history", "task_alias": "high_school_world_history", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "high_school_world_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school world history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_human_aging": { "task": "mmlu_human_aging", "task_alias": "human_aging", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "human_aging", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. 
{{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human aging.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_human_sexuality": { "task": "mmlu_human_sexuality", "task_alias": "human_sexuality", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "human_sexuality", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_international_law": { "task": "mmlu_international_law", "task_alias": "international_law", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "international_law", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about international law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_jurisprudence": { "task": "mmlu_jurisprudence", "task_alias": "jurisprudence", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "jurisprudence", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_logical_fallacies": { "task": "mmlu_logical_fallacies", "task_alias": "logical_fallacies", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "logical_fallacies", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_machine_learning": { "task": "mmlu_machine_learning", "task_alias": "machine_learning", "group": "mmlu_stem", "group_alias": "stem", "dataset_path": "hails/mmlu_no_train", "dataset_name": "machine_learning", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about machine learning.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_management": { "task": "mmlu_management", "task_alias": "management", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "management", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about management.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_marketing": { "task": "mmlu_marketing", "task_alias": "marketing", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "marketing", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about marketing.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_medical_genetics": { "task": "mmlu_medical_genetics", "task_alias": "medical_genetics", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "medical_genetics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_miscellaneous": { "task": "mmlu_miscellaneous", "task_alias": "miscellaneous", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "miscellaneous", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_moral_disputes": { "task": "mmlu_moral_disputes", "task_alias": "moral_disputes", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "moral_disputes", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_moral_scenarios": { "task": "mmlu_moral_scenarios", "task_alias": "moral_scenarios", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "moral_scenarios", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_nutrition": { "task": "mmlu_nutrition", "task_alias": "nutrition", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "nutrition", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about nutrition.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_philosophy": { "task": "mmlu_philosophy", "task_alias": "philosophy", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "philosophy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about philosophy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_prehistory": { "task": "mmlu_prehistory", "task_alias": "prehistory", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "prehistory", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about prehistory.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_accounting": { "task": "mmlu_professional_accounting", "task_alias": "professional_accounting", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "professional_accounting", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_law": { "task": "mmlu_professional_law", "task_alias": "professional_law", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "professional_law", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_medicine": { "task": "mmlu_professional_medicine", "task_alias": "professional_medicine", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "professional_medicine", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_professional_psychology": { "task": "mmlu_professional_psychology", "task_alias": "professional_psychology", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "professional_psychology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_public_relations": { "task": "mmlu_public_relations", "task_alias": "public_relations", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "public_relations", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about public relations.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_security_studies": { "task": "mmlu_security_studies", "task_alias": "security_studies", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "security_studies", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. 
{{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about security studies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_sociology": { "task": "mmlu_sociology", "task_alias": "sociology", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "sociology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about sociology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_us_foreign_policy": { "task": "mmlu_us_foreign_policy", "task_alias": "us_foreign_policy", "group": "mmlu_social_sciences", "group_alias": "social_sciences", "dataset_path": "hails/mmlu_no_train", "dataset_name": "us_foreign_policy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_virology": { "task": "mmlu_virology", "task_alias": "virology", "group": "mmlu_other", "group_alias": "other", "dataset_path": "hails/mmlu_no_train", "dataset_name": "virology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about virology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } }, "mmlu_world_religions": { "task": "mmlu_world_religions", "task_alias": "world_religions", "group": "mmlu_humanities", "group_alias": "humanities", "dataset_path": "hails/mmlu_no_train", "dataset_name": "world_religions", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about world religions.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 0.0 } } }, "versions": { "mmlu": "N/A", "mmlu_abstract_algebra": "Yaml", "mmlu_anatomy": "Yaml", "mmlu_astronomy": "Yaml", "mmlu_business_ethics": "Yaml", "mmlu_clinical_knowledge": "Yaml", "mmlu_college_biology": "Yaml", "mmlu_college_chemistry": "Yaml", "mmlu_college_computer_science": "Yaml", "mmlu_college_mathematics": "Yaml", "mmlu_college_medicine": "Yaml", "mmlu_college_physics": "Yaml", "mmlu_computer_security": "Yaml", "mmlu_conceptual_physics": "Yaml", "mmlu_econometrics": "Yaml", "mmlu_electrical_engineering": "Yaml", "mmlu_elementary_mathematics": "Yaml", "mmlu_formal_logic": "Yaml", "mmlu_global_facts": "Yaml", "mmlu_high_school_biology": "Yaml", "mmlu_high_school_chemistry": "Yaml", "mmlu_high_school_computer_science": "Yaml", "mmlu_high_school_european_history": "Yaml", "mmlu_high_school_geography": "Yaml", "mmlu_high_school_government_and_politics": "Yaml", "mmlu_high_school_macroeconomics": "Yaml", "mmlu_high_school_mathematics": "Yaml", "mmlu_high_school_microeconomics": "Yaml", "mmlu_high_school_physics": "Yaml", "mmlu_high_school_psychology": "Yaml", "mmlu_high_school_statistics": "Yaml", "mmlu_high_school_us_history": "Yaml", "mmlu_high_school_world_history": "Yaml", "mmlu_human_aging": "Yaml", "mmlu_human_sexuality": "Yaml", "mmlu_humanities": "N/A", "mmlu_international_law": "Yaml", "mmlu_jurisprudence": "Yaml", "mmlu_logical_fallacies": "Yaml", "mmlu_machine_learning": "Yaml", "mmlu_management": "Yaml", "mmlu_marketing": "Yaml", "mmlu_medical_genetics": "Yaml", "mmlu_miscellaneous": "Yaml", "mmlu_moral_disputes": "Yaml", "mmlu_moral_scenarios": "Yaml", "mmlu_nutrition": "Yaml", "mmlu_other": "N/A", "mmlu_philosophy": "Yaml", "mmlu_prehistory": "Yaml", "mmlu_professional_accounting": "Yaml", "mmlu_professional_law": "Yaml", "mmlu_professional_medicine": "Yaml", "mmlu_professional_psychology": "Yaml", "mmlu_public_relations": "Yaml", "mmlu_security_studies": "Yaml", "mmlu_social_sciences": "N/A", "mmlu_sociology": "Yaml", "mmlu_stem": "N/A", "mmlu_us_foreign_policy": "Yaml", "mmlu_virology": "Yaml", "mmlu_world_religions": "Yaml" }, "n-shot": { "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, 
"mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0 }, "config": { "model": "hf", "model_args": "pretrained=baichuan-inc/Baichuan2-7B-Base,trust_remote_code=True,load_in_4bit=True,peft=./out/lora/p15", "batch_size": "16", "batch_sizes": [], "device": "cuda:0", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null }, "git_hash": "dd6c6de" }