{
"config_general": {
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": "auto:6",
"max_samples": "null",
"job_id": "",
"model_name": "VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct",
"model_sha": "",
"model_dtype": "torch.bfloat16",
"model_size": ""
},
"results": {
"harness|truthfulqa_mc2_m_de|0": {
"acc,none": 0.2956852791878173,
"acc_stderr,none": 0.01626712741645017,
"alias": "truthfulqa_mc2_m_de"
},
"harness|truthfulqa_mc2_m_es|0": {
"acc,none": 0.3358681875792142,
"acc_stderr,none": 0.01682474414131292,
"alias": "truthfulqa_mc2_m_es"
},
"harness|arc_challenge_m_it|25": {
"acc,none": 0.5945252352437981,
"acc_stderr,none": 0.014366323465528179,
"acc_norm,none": 0.6193327630453379,
"acc_norm_stderr,none": 0.014207359046710982,
"alias": "arc_challenge_m_it"
},
"harness|mmlu_m_de|5": {
"acc,none": 0.5644139387539598,
"acc_stderr,none": 0.004306388668808089,
"alias": "mmlu_m_de"
},
"harness|belebele_ita_Latn|5": {
"acc,none": 0.8333333333333334,
"acc_stderr,none": 0.012429507075907734,
"acc_norm,none": 0.8333333333333334,
"acc_norm_stderr,none": 0.012429507075907734,
"alias": "belebele_ita_Latn"
},
"harness|mmlu_m_fr|5": {
"acc,none": 0.5821556794744481,
"acc_stderr,none": 0.004310791301464171,
"alias": "mmlu_m_fr"
},
"harness|belebele_eng_Latn|5": {
"acc,none": 0.9166666666666666,
"acc_stderr,none": 0.009217969157169199,
"acc_norm,none": 0.9166666666666666,
"acc_norm_stderr,none": 0.009217969157169199,
"alias": "belebele_eng_Latn"
},
"harness|truthfulqa_mc2_m_it|0": {
"acc,none": 0.3103448275862069,
"acc_stderr,none": 0.016543785026048318,
"alias": "truthfulqa_mc2_m_it"
},
"harness|arc_challenge_m_de|25": {
"acc,none": 0.5500427715996579,
"acc_stderr,none": 0.014556683049829988,
"acc_norm,none": 0.5936698032506416,
"acc_norm_stderr,none": 0.014371119725222025,
"alias": "arc_challenge_m_de"
},
"harness|mmlu_m_es|5": {
"acc,none": 0.5901454927253638,
"acc_stderr,none": 0.004259223847756214,
"alias": "mmlu_m_es"
},
"harness|gsm8k|5": {
"exact_match,get-answer": 0.6846095526914329,
"exact_match_stderr,get-answer": 0.012799353675801832,
"alias": "gsm8k"
},
"harness|arc_challenge_m_es|25": {
"acc,none": 0.6205128205128205,
"acc_stderr,none": 0.014192754090886751,
"acc_norm,none": 0.6410256410256411,
"acc_norm_stderr,none": 0.014030145004220061,
"alias": "arc_challenge_m_es"
},
"harness|belebele_fra_Latn|5": {
"acc,none": 0.8766666666666667,
"acc_stderr,none": 0.010966742231624086,
"acc_norm,none": 0.8766666666666667,
"acc_norm_stderr,none": 0.010966742231624086,
"alias": "belebele_fra_Latn"
},
"harness|arc_challenge_m_fr|25": {
"acc,none": 0.5996578272027374,
"acc_stderr,none": 0.014336594541437478,
"acc_norm,none": 0.630453378956373,
"acc_norm_stderr,none": 0.014123413837443291,
"alias": "arc_challenge_m_fr"
},
"harness|belebele_spa_Latn|5": {
"acc,none": 0.8744444444444445,
"acc_stderr,none": 0.011051067526018464,
"acc_norm,none": 0.8744444444444445,
"acc_norm_stderr,none": 0.011051067526018464,
"alias": "belebele_spa_Latn"
},
"harness|mmlu_m_it|5": {
"acc,none": 0.5744504041701292,
"acc_stderr,none": 0.004297570220538641,
"alias": "mmlu_m_it"
},
"harness|arc_challenge|25": {
"acc,none": 0.7303754266211604,
"acc_stderr,none": 0.01296804068686914,
"acc_norm,none": 0.7448805460750854,
"acc_norm_stderr,none": 0.012739038695202109,
"alias": "arc_challenge"
},
"harness|hendrycksTest|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-humanities|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-formal_logic|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-international_law|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-jurisprudence|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-moral_disputes|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-philosophy|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-prehistory|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-professional_law|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-world_religions|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-other|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-business_ethics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_medicine|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-global_facts|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-human_aging|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-management|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-marketing|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-medical_genetics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-miscellaneous|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-nutrition|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-professional_accounting|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-professional_medicine|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-virology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-social_sciences|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-econometrics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_geography|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-human_sexuality|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-professional_psychology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-public_relations|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-security_studies|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-sociology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-stem|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-anatomy|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-astronomy|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_biology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_chemistry|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_computer_science|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_mathematics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-college_physics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-computer_security|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_biology|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_physics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hendrycksTest-machine_learning|5": {
"acc,none": 0.6665004985044866,
"acc_stderr,none": 0.12578286666329366,
"alias": "mmlu"
},
"harness|hellaswag|10": {
"acc,none": 0.7454690300736905,
"acc_stderr,none": 0.004347070019527486,
"acc_norm,none": 0.8956383190599482,
"acc_norm_stderr,none": 0.0030510433788747708,
"alias": "hellaswag"
},
"harness|hellaswag_es|10": {
"acc,none": 0.5746746319607425,
"acc_stderr,none": 0.005106605961593097,
"acc_norm,none": 0.7491999146575635,
"acc_norm_stderr,none": 0.004477374186359933,
"alias": "hellaswag_es"
},
"harness|hellaswag_de|10": {
"acc,none": 0.5146242527754057,
"acc_stderr,none": 0.005163972260259133,
"acc_norm,none": 0.6642826643894107,
"acc_norm_stderr,none": 0.004879362489588698,
"alias": "hellaswag_de"
},
"harness|hellaswag_it|10": {
"acc,none": 0.5330142499728054,
"acc_stderr,none": 0.005203747512549052,
"acc_norm,none": 0.7029261394539323,
"acc_norm_stderr,none": 0.004766307515131304,
"alias": "hellaswag_it"
},
"harness|truthfulqa_mc2_m_fr|0": {
"acc,none": 0.33672172808132145,
"acc_stderr,none": 0.0168566854595136,
"alias": "truthfulqa_mc2_m_fr"
},
"harness|truthfulqa_mc2|0": {
"acc,none": 0.661926387370984,
"acc_stderr,none": 0.015357570618077165,
"alias": "truthfulqa_mc2"
},
"harness|hellaswag_fr|10": {
"acc,none": 0.5593274791175841,
"acc_stderr,none": 0.005137920542961698,
"acc_norm,none": 0.7264938959091882,
"acc_norm_stderr,none": 0.004613131313984876,
"alias": "hellaswag_fr"
},
"harness|belebele_deu_Latn|5": {
"acc,none": 0.8855555555555555,
"acc_stderr,none": 0.010617576963634278,
"acc_norm,none": 0.8855555555555555,
"acc_norm_stderr,none": 0.010617576963634278,
"alias": "belebele_deu_Latn"
}
},
"versions": {
"harness|truthfulqa_mc2_m_de|0": "Yaml",
"harness|truthfulqa_mc2_m_es|0": "Yaml",
"harness|arc_challenge_m_it|25": 1.0,
"harness|mmlu_m_de|5": "Yaml",
"harness|belebele_ita_Latn|5": 0.0,
"harness|mmlu_m_fr|5": "Yaml",
"harness|belebele_eng_Latn|5": 0.0,
"harness|truthfulqa_mc2_m_it|0": "Yaml",
"harness|arc_challenge_m_de|25": 1.0,
"harness|mmlu_m_es|5": "Yaml",
"harness|gsm8k|5": 2.0,
"harness|arc_challenge_m_es|25": 1.0,
"harness|belebele_fra_Latn|5": 0.0,
"harness|arc_challenge_m_fr|25": 1.0,
"harness|belebele_spa_Latn|5": 0.0,
"harness|mmlu_m_it|5": "Yaml",
"harness|arc_challenge|25": 1.0,
"harness|hendrycksTest|5": "N/A",
"harness|hendrycksTest-humanities|5": "N/A",
"harness|hendrycksTest-formal_logic|5": "N/A",
"harness|hendrycksTest-high_school_european_history|5": "N/A",
"harness|hendrycksTest-high_school_us_history|5": "N/A",
"harness|hendrycksTest-high_school_world_history|5": "N/A",
"harness|hendrycksTest-international_law|5": "N/A",
"harness|hendrycksTest-jurisprudence|5": "N/A",
"harness|hendrycksTest-logical_fallacies|5": "N/A",
"harness|hendrycksTest-moral_disputes|5": "N/A",
"harness|hendrycksTest-moral_scenarios|5": "N/A",
"harness|hendrycksTest-philosophy|5": "N/A",
"harness|hendrycksTest-prehistory|5": "N/A",
"harness|hendrycksTest-professional_law|5": "N/A",
"harness|hendrycksTest-world_religions|5": "N/A",
"harness|hendrycksTest-other|5": "N/A",
"harness|hendrycksTest-business_ethics|5": "N/A",
"harness|hendrycksTest-clinical_knowledge|5": "N/A",
"harness|hendrycksTest-college_medicine|5": "N/A",
"harness|hendrycksTest-global_facts|5": "N/A",
"harness|hendrycksTest-human_aging|5": "N/A",
"harness|hendrycksTest-management|5": "N/A",
"harness|hendrycksTest-marketing|5": "N/A",
"harness|hendrycksTest-medical_genetics|5": "N/A",
"harness|hendrycksTest-miscellaneous|5": "N/A",
"harness|hendrycksTest-nutrition|5": "N/A",
"harness|hendrycksTest-professional_accounting|5": "N/A",
"harness|hendrycksTest-professional_medicine|5": "N/A",
"harness|hendrycksTest-virology|5": "N/A",
"harness|hendrycksTest-social_sciences|5": "N/A",
"harness|hendrycksTest-econometrics|5": "N/A",
"harness|hendrycksTest-high_school_geography|5": "N/A",
"harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
"harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
"harness|hendrycksTest-high_school_microeconomics|5": "N/A",
"harness|hendrycksTest-high_school_psychology|5": "N/A",
"harness|hendrycksTest-human_sexuality|5": "N/A",
"harness|hendrycksTest-professional_psychology|5": "N/A",
"harness|hendrycksTest-public_relations|5": "N/A",
"harness|hendrycksTest-security_studies|5": "N/A",
"harness|hendrycksTest-sociology|5": "N/A",
"harness|hendrycksTest-us_foreign_policy|5": "N/A",
"harness|hendrycksTest-stem|5": "N/A",
"harness|hendrycksTest-abstract_algebra|5": "N/A",
"harness|hendrycksTest-anatomy|5": "N/A",
"harness|hendrycksTest-astronomy|5": "N/A",
"harness|hendrycksTest-college_biology|5": "N/A",
"harness|hendrycksTest-college_chemistry|5": "N/A",
"harness|hendrycksTest-college_computer_science|5": "N/A",
"harness|hendrycksTest-college_mathematics|5": "N/A",
"harness|hendrycksTest-college_physics|5": "N/A",
"harness|hendrycksTest-computer_security|5": "N/A",
"harness|hendrycksTest-conceptual_physics|5": "N/A",
"harness|hendrycksTest-electrical_engineering|5": "N/A",
"harness|hendrycksTest-elementary_mathematics|5": "N/A",
"harness|hendrycksTest-high_school_biology|5": "N/A",
"harness|hendrycksTest-high_school_chemistry|5": "N/A",
"harness|hendrycksTest-high_school_computer_science|5": "N/A",
"harness|hendrycksTest-high_school_mathematics|5": "N/A",
"harness|hendrycksTest-high_school_physics|5": "N/A",
"harness|hendrycksTest-high_school_statistics|5": "N/A",
"harness|hendrycksTest-machine_learning|5": "N/A",
"harness|hellaswag|10": 1.0,
"harness|hellaswag_es|10": 1.0,
"harness|hellaswag_de|10": 1.0,
"harness|hellaswag_it|10": 1.0,
"harness|truthfulqa_mc2_m_fr|0": "Yaml",
"harness|truthfulqa_mc2|0": 2.0,
"harness|hellaswag_fr|10": 1.0,
"harness|belebele_deu_Latn|5": 0.0
}
}
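Below is a minimal sketch, assuming the JSON above is saved locally as `results.json` (a hypothetical filename, not part of the upload), of how the per-task scores in this file could be read and printed with Python's standard `json` module. It is an illustration of the file's structure, not part of the evaluation output itself.

```python
# Minimal sketch: load the results dump and print one score per task.
# Assumes the JSON above is stored as "results.json" in the working directory.
import json

with open("results.json", "r", encoding="utf-8") as f:
    data = json.load(f)

print(f"Model: {data['config_general']['model_name']}")

# Each task entry reports either "acc,none" or, for gsm8k,
# "exact_match,get-answer", together with a matching stderr field.
for task, metrics in sorted(data["results"].items()):
    score = metrics.get("acc,none", metrics.get("exact_match,get-answer"))
    stderr = metrics.get("acc_stderr,none",
                         metrics.get("exact_match_stderr,get-answer"))
    print(f"{task}: {score:.4f} +/- {stderr:.4f}")
```

Note that the per-subject `hendrycksTest-*` entries all repeat the aggregate MMLU score (alias "mmlu"), so a summary script like the one above will print the same value for each of those keys.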