{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.20136518771331058,
"acc_stderr": 0.011718927477444265,
"acc_norm": 0.24914675767918087,
"acc_norm_stderr": 0.012639407111926437
},
"harness|ko_hellaswag|10": {
"acc": 0.26110336586337385,
"acc_stderr": 0.004383384784038473,
"acc_norm": 0.2729535949014141,
"acc_norm_stderr": 0.004445667638734141
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.26900584795321636,
"acc_stderr": 0.0340105262010409,
"acc_norm": 0.26900584795321636,
"acc_norm_stderr": 0.0340105262010409
},
"harness|ko_mmlu_management|5": {
"acc": 0.24271844660194175,
"acc_stderr": 0.04245022486384493,
"acc_norm": 0.24271844660194175,
"acc_norm_stderr": 0.04245022486384493
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2720306513409962,
"acc_stderr": 0.015913367447500517,
"acc_norm": 0.2720306513409962,
"acc_norm_stderr": 0.015913367447500517
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.040943762699967946,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.040943762699967946
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2936170212765957,
"acc_stderr": 0.029771642712491227,
"acc_norm": 0.2936170212765957,
"acc_norm_stderr": 0.029771642712491227
},
"harness|ko_mmlu_virology|5": {
"acc": 0.25301204819277107,
"acc_stderr": 0.03384429155233135,
"acc_norm": 0.25301204819277107,
"acc_norm_stderr": 0.03384429155233135
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.27009646302250806,
"acc_stderr": 0.025218040373410622,
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410622
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.25112107623318386,
"acc_stderr": 0.029105220833224615,
"acc_norm": 0.25112107623318386,
"acc_norm_stderr": 0.029105220833224615
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.038073871163060866,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.038073871163060866
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.18,
"acc_stderr": 0.03861229196653694,
"acc_norm": 0.18,
"acc_norm_stderr": 0.03861229196653694
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.22727272727272727,
"acc_stderr": 0.02985751567338642,
"acc_norm": 0.22727272727272727,
"acc_norm_stderr": 0.02985751567338642
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3724137931034483,
"acc_stderr": 0.040287315329475604,
"acc_norm": 0.3724137931034483,
"acc_norm_stderr": 0.040287315329475604
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.13725490196078433,
"acc_stderr": 0.0342408466989152,
"acc_norm": 0.13725490196078433,
"acc_norm_stderr": 0.0342408466989152
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868966,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868966
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2358974358974359,
"acc_stderr": 0.021525965407408726,
"acc_norm": 0.2358974358974359,
"acc_norm_stderr": 0.021525965407408726
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.28703703703703703,
"acc_stderr": 0.043733130409147614,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.043733130409147614
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.33497536945812806,
"acc_stderr": 0.033208527423483104,
"acc_norm": 0.33497536945812806,
"acc_norm_stderr": 0.033208527423483104
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2903225806451613,
"acc_stderr": 0.025822106119415898,
"acc_norm": 0.2903225806451613,
"acc_norm_stderr": 0.025822106119415898
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.27350427350427353,
"acc_stderr": 0.029202540153431173,
"acc_norm": 0.27350427350427353,
"acc_norm_stderr": 0.029202540153431173
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27169811320754716,
"acc_stderr": 0.027377706624670713,
"acc_norm": 0.27169811320754716,
"acc_norm_stderr": 0.027377706624670713
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.040693063197213754,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.040693063197213754
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.02794045713622842,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02794045713622842
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.24503311258278146,
"acc_stderr": 0.03511807571804724,
"acc_norm": 0.24503311258278146,
"acc_norm_stderr": 0.03511807571804724
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2885572139303483,
"acc_stderr": 0.03203841040213321,
"acc_norm": 0.2885572139303483,
"acc_norm_stderr": 0.03203841040213321
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483098,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.03242414757483098
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.02351729433596329,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.02351729433596329
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.22916666666666666,
"acc_stderr": 0.03514697467862388,
"acc_norm": 0.22916666666666666,
"acc_norm_stderr": 0.03514697467862388
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909282
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2976878612716763,
"acc_stderr": 0.024617055388677,
"acc_norm": 0.2976878612716763,
"acc_norm_stderr": 0.024617055388677
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.34355828220858897,
"acc_stderr": 0.037311335196738925,
"acc_norm": 0.34355828220858897,
"acc_norm_stderr": 0.037311335196738925
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.32098765432098764,
"acc_stderr": 0.025976566010862737,
"acc_norm": 0.32098765432098764,
"acc_norm_stderr": 0.025976566010862737
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.02951928261681725,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.02951928261681725
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21929824561403508,
"acc_stderr": 0.03892431106518752,
"acc_norm": 0.21929824561403508,
"acc_norm_stderr": 0.03892431106518752
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.23669724770642203,
"acc_stderr": 0.01822407811729908,
"acc_norm": 0.23669724770642203,
"acc_norm_stderr": 0.01822407811729908
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.03809523809523813,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.03809523809523813
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3202614379084967,
"acc_stderr": 0.026716118380156834,
"acc_norm": 0.3202614379084967,
"acc_norm_stderr": 0.026716118380156834
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720685,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720685
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.39669421487603307,
"acc_stderr": 0.04465869780531009,
"acc_norm": 0.39669421487603307,
"acc_norm_stderr": 0.04465869780531009
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3355263157894737,
"acc_stderr": 0.03842498559395269,
"acc_norm": 0.3355263157894737,
"acc_norm_stderr": 0.03842498559395269
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.018120224251484587,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.018120224251484587
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2553191489361702,
"acc_stderr": 0.026011992930902002,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.026011992930902002
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.04432804055291519,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291519
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.18055555555555555,
"acc_stderr": 0.026232878971491656,
"acc_norm": 0.18055555555555555,
"acc_norm_stderr": 0.026232878971491656
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.25027932960893856,
"acc_stderr": 0.01448750085285042,
"acc_norm": 0.25027932960893856,
"acc_norm_stderr": 0.01448750085285042
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.21323529411764705,
"acc_stderr": 0.024880971512294254,
"acc_norm": 0.21323529411764705,
"acc_norm_stderr": 0.024880971512294254
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.23265306122448978,
"acc_stderr": 0.02704925791589618,
"acc_norm": 0.23265306122448978,
"acc_norm_stderr": 0.02704925791589618
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.26582278481012656,
"acc_stderr": 0.02875679962965834,
"acc_norm": 0.26582278481012656,
"acc_norm_stderr": 0.02875679962965834
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.26792698826597133,
"acc_stderr": 0.011311347690633869,
"acc_norm": 0.26792698826597133,
"acc_norm_stderr": 0.011311347690633869
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604246,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.0340150671524904,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.0340150671524904
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24969400244798043,
"mc1_stderr": 0.015152286907148128,
"mc2": 0.4821689215890819,
"mc2_stderr": 0.016978019371229284
},
"harness|ko_commongen_v2|2": {
"acc": 0.10861865407319952,
"acc_stderr": 0.010697906495255899,
"acc_norm": 0.32113341204250295,
"acc_norm_stderr": 0.016052762579111573
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "4yo1/llama3-pre1-ds-lora1",
"model_sha": "e958846fd51d3fff3221716153cbd7a2df924dd2",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}