{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.20392491467576793,
            "acc_stderr": 0.011774262478702254,
            "acc_norm": 0.2551194539249147,
            "acc_norm_stderr": 0.012739038695202102
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2531368253335989,
            "acc_stderr": 0.0043392003634544945,
            "acc_norm": 0.2502489543915555,
            "acc_norm_stderr": 0.004322710911026373
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.391812865497076,
            "acc_stderr": 0.03743979825926401,
            "acc_norm": 0.391812865497076,
            "acc_norm_stderr": 0.03743979825926401
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2815533980582524,
            "acc_stderr": 0.044532548363264673,
            "acc_norm": 0.2815533980582524,
            "acc_norm_stderr": 0.044532548363264673
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.30779054916985954,
            "acc_stderr": 0.016506045045155633,
            "acc_norm": 0.30779054916985954,
            "acc_norm_stderr": 0.016506045045155633
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2425531914893617,
            "acc_stderr": 0.028020226271200217,
            "acc_norm": 0.2425531914893617,
            "acc_norm_stderr": 0.028020226271200217
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3072289156626506,
            "acc_stderr": 0.035915667978246635,
            "acc_norm": 0.3072289156626506,
            "acc_norm_stderr": 0.035915667978246635
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.35691318327974275,
            "acc_stderr": 0.02721042037593403,
            "acc_norm": 0.35691318327974275,
            "acc_norm_stderr": 0.02721042037593403
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.242152466367713,
            "acc_stderr": 0.028751392398694755,
            "acc_norm": 0.242152466367713,
            "acc_norm_stderr": 0.028751392398694755
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3435114503816794,
            "acc_stderr": 0.041649760719448786,
            "acc_norm": 0.3435114503816794,
            "acc_norm_stderr": 0.041649760719448786
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29292929292929293,
            "acc_stderr": 0.03242497958178817,
            "acc_norm": 0.29292929292929293,
            "acc_norm_stderr": 0.03242497958178817
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3586206896551724,
            "acc_stderr": 0.039966295748767186,
            "acc_norm": 0.3586206896551724,
            "acc_norm_stderr": 0.039966295748767186
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237656,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237656
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2773109243697479,
            "acc_stderr": 0.029079374539480007,
            "acc_norm": 0.2773109243697479,
            "acc_norm_stderr": 0.029079374539480007
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2564102564102564,
            "acc_stderr": 0.022139081103971527,
            "acc_norm": 0.2564102564102564,
            "acc_norm_stderr": 0.022139081103971527
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.28703703703703703,
            "acc_stderr": 0.043733130409147614,
            "acc_norm": 0.28703703703703703,
            "acc_norm_stderr": 0.043733130409147614
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.30049261083743845,
            "acc_stderr": 0.03225799476233484,
            "acc_norm": 0.30049261083743845,
            "acc_norm_stderr": 0.03225799476233484
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.31290322580645163,
            "acc_stderr": 0.026377567028645854,
            "acc_norm": 0.31290322580645163,
            "acc_norm_stderr": 0.026377567028645854
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2948717948717949,
            "acc_stderr": 0.029872577708891148,
            "acc_norm": 0.2948717948717949,
            "acc_norm_stderr": 0.029872577708891148
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24528301886792453,
            "acc_stderr": 0.026480357179895702,
            "acc_norm": 0.24528301886792453,
            "acc_norm_stderr": 0.026480357179895702
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.040139645540727735,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.040139645540727735
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.29259259259259257,
            "acc_stderr": 0.02773896963217609,
            "acc_norm": 0.29259259259259257,
            "acc_norm_stderr": 0.02773896963217609
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.26865671641791045,
            "acc_stderr": 0.03134328358208954,
            "acc_norm": 0.26865671641791045,
            "acc_norm_stderr": 0.03134328358208954
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.28901734104046245,
            "acc_stderr": 0.03456425745086999,
            "acc_norm": 0.28901734104046245,
            "acc_norm_stderr": 0.03456425745086999
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.291005291005291,
            "acc_stderr": 0.02339382650048488,
            "acc_norm": 0.291005291005291,
            "acc_norm_stderr": 0.02339382650048488
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.24305555555555555,
            "acc_stderr": 0.03586879280080342,
            "acc_norm": 0.24305555555555555,
            "acc_norm_stderr": 0.03586879280080342
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932269,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932269
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.31213872832369943,
            "acc_stderr": 0.024946792225272314,
            "acc_norm": 0.31213872832369943,
            "acc_norm_stderr": 0.024946792225272314
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3374233128834356,
            "acc_stderr": 0.03714908409935574,
            "acc_norm": 0.3374233128834356,
            "acc_norm_stderr": 0.03714908409935574
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2808641975308642,
            "acc_stderr": 0.025006469755799208,
            "acc_norm": 0.2808641975308642,
            "acc_norm_stderr": 0.025006469755799208
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816508,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816508
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2694300518134715,
            "acc_stderr": 0.03201867122877793,
            "acc_norm": 0.2694300518134715,
            "acc_norm_stderr": 0.03201867122877793
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.28073394495412846,
            "acc_stderr": 0.019266055045871616,
            "acc_norm": 0.28073394495412846,
            "acc_norm_stderr": 0.019266055045871616
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.16666666666666666,
            "acc_stderr": 0.03333333333333338,
            "acc_norm": 0.16666666666666666,
            "acc_norm_stderr": 0.03333333333333338
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2908496732026144,
            "acc_stderr": 0.026004800363952113,
            "acc_norm": 0.2908496732026144,
            "acc_norm_stderr": 0.026004800363952113
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4297520661157025,
            "acc_stderr": 0.04519082021319773,
            "acc_norm": 0.4297520661157025,
            "acc_norm_stderr": 0.04519082021319773
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.039889037033362836,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.039889037033362836
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2875816993464052,
            "acc_stderr": 0.018311653053648222,
            "acc_norm": 0.2875816993464052,
            "acc_norm_stderr": 0.018311653053648222
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.025257861359432397,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.025257861359432397
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.04327040932578732,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.04327040932578732
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.03275773486100999,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.03275773486100999
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098426,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098426
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.1948529411764706,
            "acc_stderr": 0.02406059942348742,
            "acc_norm": 0.1948529411764706,
            "acc_norm_stderr": 0.02406059942348742
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24489795918367346,
            "acc_stderr": 0.027529637440174927,
            "acc_norm": 0.24489795918367346,
            "acc_norm_stderr": 0.027529637440174927
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.270042194092827,
            "acc_stderr": 0.028900721906293426,
            "acc_norm": 0.270042194092827,
            "acc_norm_stderr": 0.028900721906293426
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2620599739243807,
            "acc_stderr": 0.011231552795890394,
            "acc_norm": 0.2620599739243807,
            "acc_norm_stderr": 0.011231552795890394
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2696078431372549,
            "acc_stderr": 0.03114557065948678,
            "acc_norm": 0.2696078431372549,
            "acc_norm_stderr": 0.03114557065948678
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24848484848484848,
            "acc_stderr": 0.03374402644139405,
            "acc_norm": 0.24848484848484848,
            "acc_norm_stderr": 0.03374402644139405
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2423500611995104,
            "mc1_stderr": 0.015000674373570342,
            "mc2": 0.502230955672644,
            "mc2_stderr": 0.017048304732843935
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.10271546635182999,
            "acc_stderr": 0.010437532255238496,
            "acc_norm": 0.3695395513577332,
            "acc_norm_stderr": 0.01659488340568542
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama3-pre1-pre2-ds-ins2-lora3",
        "model_sha": "e36e97d7503ad2c1d406edd928de720fa514d1ef",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}