euro-llm-leaderboard-requests/jpacifico/French-Alpaca-Llama3-8B-Instruct-v1.0/results_2024_06_03T12-24-56.json
{
  "config_general": {
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": "auto:6",
    "max_samples": "null",
    "job_id": "",
    "model_name": "jpacifico/French-Alpaca-Llama3-8B-Instruct-v1.0",
    "model_sha": "",
    "model_dtype": "torch.bfloat16",
    "model_size": ""
  },
  "results": {
    "harness|hellaswag_fr|10": {
      "acc,none": 0.5063182694367102,
      "acc_stderr,none": 0.005174062227988366,
      "acc_norm,none": 0.67551938316556,
      "acc_norm_stderr,none": 0.004845176560636152,
      "alias": "hellaswag_fr"
    },
    "harness|arc_challenge_m_it|25": {
      "acc,none": 0.5089820359281437,
      "acc_stderr,none": 0.014627782577773944,
      "acc_norm,none": 0.5397775876817793,
      "acc_norm_stderr,none": 0.01458377263416605,
      "alias": "arc_challenge_m_it"
    },
    "harness|gsm8k|5": {
      "exact_match,get-answer": 0.66565579984837,
      "exact_match_stderr,get-answer": 0.012994634003332763,
      "alias": "gsm8k"
    },
    "harness|truthfulqa_mc2|0": {
      "acc,none": 0.49419688087237895,
      "acc_stderr,none": 0.014691972386008712,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc_challenge_m_fr|25": {
      "acc,none": 0.49871685201026517,
      "acc_stderr,none": 0.01463009522328876,
      "acc_norm,none": 0.5431993156544055,
      "acc_norm_stderr,none": 0.014575436127048211,
      "alias": "arc_challenge_m_fr"
    },
    "harness|mmlu_m_fr|5": {
      "acc,none": 0.5703154839202506,
      "acc_stderr,none": 0.004326758046899862,
      "alias": "mmlu_m_fr"
    },
    "harness|truthfulqa_mc2_m_es|0": {
      "acc,none": 0.2674271229404309,
      "acc_stderr,none": 0.01576757970059163,
      "alias": "truthfulqa_mc2_m_es"
    },
    "harness|belebele_eng_Latn|5": {
      "acc,none": 0.9055555555555556,
      "acc_stderr,none": 0.009753621805733555,
      "acc_norm,none": 0.9055555555555556,
      "acc_norm_stderr,none": 0.009753621805733555,
      "alias": "belebele_eng_Latn"
    },
    "harness|mmlu_m_es|5": {
      "acc,none": 0.5768711564421779,
      "acc_stderr,none": 0.004278699519688541,
      "alias": "mmlu_m_es"
    },
    "harness|hellaswag_it|10": {
      "acc,none": 0.4840639617099967,
      "acc_stderr,none": 0.005212478778642557,
      "acc_norm,none": 0.6560426411399978,
      "acc_norm_stderr,none": 0.004954653970651795,
      "alias": "hellaswag_it"
    },
    "harness|belebele_fra_Latn|5": {
      "acc,none": 0.8477777777777777,
      "acc_stderr,none": 0.011981196673569642,
      "acc_norm,none": 0.8477777777777777,
      "acc_norm_stderr,none": 0.011981196673569642,
      "alias": "belebele_fra_Latn"
    },
    "harness|hellaswag_de|10": {
      "acc,none": 0.4738471391972673,
      "acc_stderr,none": 0.005159110613278454,
      "acc_norm,none": 0.6286293766011956,
      "acc_norm_stderr,none": 0.004992302000579806,
      "alias": "hellaswag_de"
    },
    "harness|arc_challenge_m_de|25": {
      "acc,none": 0.4739093242087254,
      "acc_stderr,none": 0.014610211661428537,
      "acc_norm,none": 0.5183917878528657,
      "acc_norm_stderr,none": 0.014620242527326811,
      "alias": "arc_challenge_m_de"
    },
    "harness|mmlu_m_it|5": {
      "acc,none": 0.5661403641308453,
      "acc_stderr,none": 0.004307827500632795,
      "alias": "mmlu_m_it"
    },
    "harness|hellaswag|10": {
      "acc,none": 0.6217884883489345,
      "acc_stderr,none": 0.004839497020536618,
      "acc_norm,none": 0.8193586934873531,
      "acc_norm_stderr,none": 0.003839344497192083,
      "alias": "hellaswag"
    },
    "harness|arc_challenge|25": {
      "acc,none": 0.5947098976109215,
      "acc_stderr,none": 0.014346869060229318,
      "acc_norm,none": 0.6407849829351536,
      "acc_norm_stderr,none": 0.014020224155839157,
      "alias": "arc_challenge"
    },
    "harness|arc_challenge_m_es|25": {
      "acc,none": 0.5153846153846153,
      "acc_stderr,none": 0.01461696032622132,
      "acc_norm,none": 0.5547008547008547,
      "acc_norm_stderr,none": 0.014536106383401226,
      "alias": "arc_challenge_m_es"
    },
    "harness|hendrycksTest|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-humanities|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-formal_logic|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_european_history|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_us_history|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_world_history|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-international_law|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-jurisprudence|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-logical_fallacies|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-moral_disputes|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-moral_scenarios|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-philosophy|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-prehistory|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_law|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-world_religions|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-other|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-business_ethics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_medicine|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-global_facts|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-human_aging|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-management|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-marketing|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-medical_genetics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-miscellaneous|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-nutrition|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_accounting|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_medicine|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-virology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-social_sciences|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-econometrics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_geography|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_psychology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-human_sexuality|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-professional_psychology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-public_relations|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-security_studies|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-sociology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-stem|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-abstract_algebra|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-anatomy|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-astronomy|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_biology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_chemistry|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_computer_science|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_mathematics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-college_physics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-computer_security|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-conceptual_physics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-electrical_engineering|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_biology|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_physics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-high_school_statistics|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|hendrycksTest-machine_learning|5": {
      "acc,none": 0.6615866685657313,
      "acc_stderr,none": 0.12826541916322984,
      "alias": "mmlu"
    },
    "harness|belebele_deu_Latn|5": {
      "acc,none": 0.8488888888888889,
      "acc_stderr,none": 0.011945209697456624,
      "acc_norm,none": 0.8488888888888889,
      "acc_norm_stderr,none": 0.011945209697456624,
      "alias": "belebele_deu_Latn"
    },
    "harness|truthfulqa_mc2_m_fr|0": {
      "acc,none": 0.2782719186785261,
      "acc_stderr,none": 0.015984910901571035,
      "alias": "truthfulqa_mc2_m_fr"
    },
    "harness|mmlu_m_de|5": {
      "acc,none": 0.5568713229748077,
      "acc_stderr,none": 0.0043143932405325025,
      "alias": "mmlu_m_de"
    },
    "harness|belebele_ita_Latn|5": {
      "acc,none": 0.8211111111111111,
      "acc_stderr,none": 0.012782411716899067,
      "acc_norm,none": 0.8211111111111111,
      "acc_norm_stderr,none": 0.012782411716899067,
      "alias": "belebele_ita_Latn"
    },
    "harness|truthfulqa_mc2_m_de|0": {
      "acc,none": 0.24619289340101522,
      "acc_stderr,none": 0.015356084872692898,
      "alias": "truthfulqa_mc2_m_de"
    },
    "harness|belebele_spa_Latn|5": {
      "acc,none": 0.8366666666666667,
      "acc_stderr,none": 0.012329168844652513,
      "acc_norm,none": 0.8366666666666667,
      "acc_norm_stderr,none": 0.012329168844652513,
      "alias": "belebele_spa_Latn"
    },
    "harness|hellaswag_es|10": {
      "acc,none": 0.5134414337529336,
      "acc_stderr,none": 0.00516266219718063,
      "acc_norm,none": 0.6941540430979305,
      "acc_norm_stderr,none": 0.004759266148217961,
      "alias": "hellaswag_es"
    },
    "harness|truthfulqa_mc2_m_it|0": {
      "acc,none": 0.2950191570881226,
      "acc_stderr,none": 0.016308363772932724,
      "alias": "truthfulqa_mc2_m_it"
    }
  },
  "versions": {
    "harness|hellaswag_fr|10": 1.0,
    "harness|arc_challenge_m_it|25": 1.0,
    "harness|gsm8k|5": 2.0,
    "harness|truthfulqa_mc2|0": 2.0,
    "harness|arc_challenge_m_fr|25": 1.0,
    "harness|mmlu_m_fr|5": "Yaml",
    "harness|truthfulqa_mc2_m_es|0": "Yaml",
    "harness|belebele_eng_Latn|5": 0.0,
    "harness|mmlu_m_es|5": "Yaml",
    "harness|hellaswag_it|10": 1.0,
    "harness|belebele_fra_Latn|5": 0.0,
    "harness|hellaswag_de|10": 1.0,
    "harness|arc_challenge_m_de|25": 1.0,
    "harness|mmlu_m_it|5": "Yaml",
    "harness|hellaswag|10": 1.0,
    "harness|arc_challenge|25": 1.0,
    "harness|arc_challenge_m_es|25": 1.0,
    "harness|hendrycksTest|5": "N/A",
    "harness|hendrycksTest-humanities|5": "N/A",
    "harness|hendrycksTest-formal_logic|5": "N/A",
    "harness|hendrycksTest-high_school_european_history|5": "N/A",
    "harness|hendrycksTest-high_school_us_history|5": "N/A",
    "harness|hendrycksTest-high_school_world_history|5": "N/A",
    "harness|hendrycksTest-international_law|5": "N/A",
    "harness|hendrycksTest-jurisprudence|5": "N/A",
    "harness|hendrycksTest-logical_fallacies|5": "N/A",
    "harness|hendrycksTest-moral_disputes|5": "N/A",
    "harness|hendrycksTest-moral_scenarios|5": "N/A",
    "harness|hendrycksTest-philosophy|5": "N/A",
    "harness|hendrycksTest-prehistory|5": "N/A",
    "harness|hendrycksTest-professional_law|5": "N/A",
    "harness|hendrycksTest-world_religions|5": "N/A",
    "harness|hendrycksTest-other|5": "N/A",
    "harness|hendrycksTest-business_ethics|5": "N/A",
    "harness|hendrycksTest-clinical_knowledge|5": "N/A",
    "harness|hendrycksTest-college_medicine|5": "N/A",
    "harness|hendrycksTest-global_facts|5": "N/A",
    "harness|hendrycksTest-human_aging|5": "N/A",
    "harness|hendrycksTest-management|5": "N/A",
    "harness|hendrycksTest-marketing|5": "N/A",
    "harness|hendrycksTest-medical_genetics|5": "N/A",
    "harness|hendrycksTest-miscellaneous|5": "N/A",
    "harness|hendrycksTest-nutrition|5": "N/A",
    "harness|hendrycksTest-professional_accounting|5": "N/A",
    "harness|hendrycksTest-professional_medicine|5": "N/A",
    "harness|hendrycksTest-virology|5": "N/A",
    "harness|hendrycksTest-social_sciences|5": "N/A",
    "harness|hendrycksTest-econometrics|5": "N/A",
    "harness|hendrycksTest-high_school_geography|5": "N/A",
    "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
    "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
    "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
    "harness|hendrycksTest-high_school_psychology|5": "N/A",
    "harness|hendrycksTest-human_sexuality|5": "N/A",
    "harness|hendrycksTest-professional_psychology|5": "N/A",
    "harness|hendrycksTest-public_relations|5": "N/A",
    "harness|hendrycksTest-security_studies|5": "N/A",
    "harness|hendrycksTest-sociology|5": "N/A",
    "harness|hendrycksTest-us_foreign_policy|5": "N/A",
    "harness|hendrycksTest-stem|5": "N/A",
    "harness|hendrycksTest-abstract_algebra|5": "N/A",
    "harness|hendrycksTest-anatomy|5": "N/A",
    "harness|hendrycksTest-astronomy|5": "N/A",
    "harness|hendrycksTest-college_biology|5": "N/A",
    "harness|hendrycksTest-college_chemistry|5": "N/A",
    "harness|hendrycksTest-college_computer_science|5": "N/A",
    "harness|hendrycksTest-college_mathematics|5": "N/A",
    "harness|hendrycksTest-college_physics|5": "N/A",
    "harness|hendrycksTest-computer_security|5": "N/A",
    "harness|hendrycksTest-conceptual_physics|5": "N/A",
    "harness|hendrycksTest-electrical_engineering|5": "N/A",
    "harness|hendrycksTest-elementary_mathematics|5": "N/A",
    "harness|hendrycksTest-high_school_biology|5": "N/A",
    "harness|hendrycksTest-high_school_chemistry|5": "N/A",
    "harness|hendrycksTest-high_school_computer_science|5": "N/A",
    "harness|hendrycksTest-high_school_mathematics|5": "N/A",
    "harness|hendrycksTest-high_school_physics|5": "N/A",
    "harness|hendrycksTest-high_school_statistics|5": "N/A",
    "harness|hendrycksTest-machine_learning|5": "N/A",
    "harness|belebele_deu_Latn|5": 0.0,
    "harness|truthfulqa_mc2_m_fr|0": "Yaml",
    "harness|mmlu_m_de|5": "Yaml",
    "harness|belebele_ita_Latn|5": 0.0,
    "harness|truthfulqa_mc2_m_de|0": "Yaml",
    "harness|belebele_spa_Latn|5": 0.0,
    "harness|hellaswag_es|10": 1.0,
    "harness|truthfulqa_mc2_m_it|0": "Yaml"
  }
}
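
Note on reading this dump: every entry under "results" is keyed as harness|<task>|<num_fewshot>, the metric names vary by task ("acc,none" and "acc_norm,none" for multiple-choice tasks, "exact_match,get-answer" for gsm8k), and "versions" mirrors the same keys. The following is a minimal Python sketch for listing one score per task; the local file name is an assumption taken from the header above, and the metric preference order is an illustrative choice, not the leaderboard's own aggregation rule.

import json

# Assumed local copy of this results file (name taken from the header above).
PATH = "results_2024_06_03T12-24-56.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])

for task, metrics in data["results"].items():
    # Report the first metric present, preferring normalized accuracy;
    # gsm8k only reports an exact-match score.
    for key in ("acc_norm,none", "acc,none", "exact_match,get-answer"):
        if key in metrics:
            print(f"{task:65s} {key:25s} {metrics[key]:.4f}")
            break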