{
    "config_general": {
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": "auto:6",
"max_samples": "null", |
|
"job_id": "", |
|
"model_name": "croissantllm/CroissantCool-v0.2", |
|
"model_sha": "", |
|
"model_dtype": "torch.bfloat16", |
|
"model_size": "" |
|
}, |
|
"results": { |
|
"harness|truthfulqa_mc2_m_de|0": { |
|
"acc,none": 0.22588832487309646, |
|
"acc_stderr,none": 0.01490600748558298, |
|
"alias": "truthfulqa_mc2_m_de" |
|
}, |
|
"harness|truthfulqa_mc2_m_es|0": { |
|
"acc,none": 0.23954372623574144, |
|
"acc_stderr,none": 0.015204299479455162, |
|
"alias": "truthfulqa_mc2_m_es" |
|
}, |
|
"harness|arc_challenge_m_it|25": { |
|
"acc,none": 0.21642429426860565, |
|
"acc_stderr,none": 0.012049584313260761, |
|
"acc_norm,none": 0.2549187339606501, |
|
"acc_norm_stderr,none": 0.01275208729343999, |
|
"alias": "arc_challenge_m_it" |
|
}, |
|
"harness|mmlu_m_de|5": { |
|
"acc,none": 0.2467943882938603, |
|
"acc_stderr,none": 0.0037445685510601057, |
|
"alias": "mmlu_m_de" |
|
}, |
|
"harness|belebele_ita_Latn|5": { |
|
"acc,none": 0.24888888888888888, |
|
"acc_stderr,none": 0.014420323451642543, |
|
"acc_norm,none": 0.24888888888888888, |
|
"acc_norm_stderr,none": 0.014420323451642543, |
|
"alias": "belebele_ita_Latn" |
|
}, |
|
"harness|mmlu_m_fr|5": { |
|
"acc,none": 0.23879000840271944, |
|
"acc_stderr,none": 0.003726408760837494, |
|
"alias": "mmlu_m_fr" |
|
}, |
|
"harness|belebele_eng_Latn|5": { |
|
"acc,none": 0.24, |
|
"acc_stderr,none": 0.014244019879792654, |
|
"acc_norm,none": 0.24, |
|
"acc_norm_stderr,none": 0.014244019879792654, |
|
"alias": "belebele_eng_Latn" |
|
}, |
|
"harness|truthfulqa_mc2_m_it|0": { |
|
"acc,none": 0.23243933588761176, |
|
"acc_stderr,none": 0.015104550008905692, |
|
"alias": "truthfulqa_mc2_m_it" |
|
}, |
|
"harness|arc_challenge_m_de|25": { |
|
"acc,none": 0.20786997433704021, |
|
"acc_stderr,none": 0.011873334349879949, |
|
"acc_norm,none": 0.23609923011120615, |
|
"acc_norm_stderr,none": 0.012426371635795895, |
|
"alias": "arc_challenge_m_de" |
|
}, |
|
"harness|mmlu_m_es|5": { |
|
"acc,none": 0.24441277936103195, |
|
"acc_stderr,none": 0.003721691043472131, |
|
"alias": "mmlu_m_es" |
|
}, |
|
"harness|gsm8k|5": { |
|
"exact_match,get-answer": 0.0, |
|
"exact_match_stderr,get-answer": 0.0, |
|
"alias": "gsm8k" |
|
}, |
|
"harness|arc_challenge_m_es|25": { |
|
"acc,none": 0.2222222222222222, |
|
"acc_stderr,none": 0.012159459003657435, |
|
"acc_norm,none": 0.2358974358974359, |
|
"acc_norm_stderr,none": 0.012417386043886862, |
|
"alias": "arc_challenge_m_es" |
|
}, |
|
"harness|belebele_fra_Latn|5": { |
|
"acc,none": 0.22333333333333333, |
|
"acc_stderr,none": 0.013890384297198721, |
|
"acc_norm,none": 0.22333333333333333, |
|
"acc_norm_stderr,none": 0.013890384297198721, |
|
"alias": "belebele_fra_Latn" |
|
}, |
|
"harness|arc_challenge_m_fr|25": { |
|
"acc,none": 0.262617621899059, |
|
"acc_stderr,none": 0.012876175520452839, |
|
"acc_norm,none": 0.30710008554319934, |
|
"acc_norm_stderr,none": 0.013497514452902142, |
|
"alias": "arc_challenge_m_fr" |
|
}, |
|
"harness|belebele_spa_Latn|5": { |
|
"acc,none": 0.26, |
|
"acc_stderr,none": 0.014629271097998376, |
|
"acc_norm,none": 0.26, |
|
"acc_norm_stderr,none": 0.014629271097998376, |
|
"alias": "belebele_spa_Latn" |
|
}, |
|
"harness|mmlu_m_it|5": { |
|
"acc,none": 0.24242653169147088, |
|
"acc_stderr,none": 0.0037249810960834737, |
|
"alias": "mmlu_m_it" |
|
}, |
|
"harness|arc_challenge|25": { |
|
"acc,none": 0.2738907849829352, |
|
"acc_stderr,none": 0.013032004972989506, |
|
"acc_norm,none": 0.31399317406143346, |
|
"acc_norm_stderr,none": 0.013562691224726284, |
|
"alias": "arc_challenge" |
|
}, |
|
"harness|hendrycksTest|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-humanities|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-formal_logic|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_european_history|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_us_history|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_world_history|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-international_law|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-jurisprudence|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-logical_fallacies|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-moral_disputes|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-moral_scenarios|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-philosophy|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-prehistory|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-professional_law|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-world_religions|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-other|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-business_ethics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-clinical_knowledge|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_medicine|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-global_facts|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-human_aging|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-management|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-marketing|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-medical_genetics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-miscellaneous|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-nutrition|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-professional_accounting|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-professional_medicine|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-virology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-social_sciences|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-econometrics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_geography|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_government_and_politics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_macroeconomics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_microeconomics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_psychology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-human_sexuality|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-professional_psychology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-public_relations|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-security_studies|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-sociology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-us_foreign_policy|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-stem|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-abstract_algebra|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-anatomy|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-astronomy|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_biology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_chemistry|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_computer_science|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_mathematics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-college_physics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-computer_security|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-conceptual_physics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-electrical_engineering|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-elementary_mathematics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_biology|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_chemistry|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_computer_science|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_mathematics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_physics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-high_school_statistics|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hendrycksTest-machine_learning|5": { |
|
"acc,none": 0.24398233869819114, |
|
"acc_stderr,none": 0.03858336876902983, |
|
"alias": "mmlu" |
|
}, |
|
"harness|hellaswag|10": { |
|
"acc,none": 0.41854212308305117, |
|
"acc_stderr,none": 0.0049231178497402914, |
|
"acc_norm,none": 0.5495917147978491, |
|
"acc_norm_stderr,none": 0.004965177633049912, |
|
"alias": "hellaswag" |
|
}, |
|
"harness|hellaswag_es|10": { |
|
"acc,none": 0.31480691273735867, |
|
"acc_stderr,none": 0.004797216381751611, |
|
"acc_norm,none": 0.3693193940687007, |
|
"acc_norm_stderr,none": 0.004985015198762361, |
|
"alias": "hellaswag_es" |
|
}, |
|
"harness|hellaswag_de|10": { |
|
"acc,none": 0.28981639624252775, |
|
"acc_stderr,none": 0.004687556727804856, |
|
"acc_norm,none": 0.3121263877028181, |
|
"acc_norm_stderr,none": 0.004787615979437613, |
|
"alias": "hellaswag_de" |
|
}, |
|
"harness|hellaswag_it|10": { |
|
"acc,none": 0.30088110518873057, |
|
"acc_stderr,none": 0.004783744433604262, |
|
"acc_norm,none": 0.34896116610464484, |
|
"acc_norm_stderr,none": 0.004971494796857801, |
|
"alias": "hellaswag_it" |
|
}, |
|
"harness|truthfulqa_mc2_m_fr|0": { |
|
"acc,none": 0.21855146124523506, |
|
"acc_stderr,none": 0.014740620860385406, |
|
"alias": "truthfulqa_mc2_m_fr" |
|
}, |
|
"harness|truthfulqa_mc2|0": { |
|
"acc,none": 0.393879839368423, |
|
"acc_stderr,none": 0.014554339369270685, |
|
"alias": "truthfulqa_mc2" |
|
}, |
|
"harness|hellaswag_fr|10": { |
|
"acc,none": 0.4089740843863782, |
|
"acc_stderr,none": 0.005088004388421943, |
|
"acc_norm,none": 0.5261297922467337, |
|
"acc_norm_stderr,none": 0.005167404636945407, |
|
"alias": "hellaswag_fr" |
|
}, |
|
"harness|belebele_deu_Latn|5": { |
|
"acc,none": 0.2633333333333333, |
|
"acc_stderr,none": 0.014689553047342528, |
|
"acc_norm,none": 0.2633333333333333, |
|
"acc_norm_stderr,none": 0.014689553047342528, |
|
"alias": "belebele_deu_Latn" |
|
} |
|
}, |
|
"versions": { |
|
"harness|truthfulqa_mc2_m_de|0": "Yaml", |
|
"harness|truthfulqa_mc2_m_es|0": "Yaml", |
|
"harness|arc_challenge_m_it|25": 1.0, |
|
"harness|mmlu_m_de|5": "Yaml", |
|
"harness|belebele_ita_Latn|5": 0.0, |
|
"harness|mmlu_m_fr|5": "Yaml", |
|
"harness|belebele_eng_Latn|5": 0.0, |
|
"harness|truthfulqa_mc2_m_it|0": "Yaml", |
|
"harness|arc_challenge_m_de|25": 1.0, |
|
"harness|mmlu_m_es|5": "Yaml", |
|
"harness|gsm8k|5": 2.0, |
|
"harness|arc_challenge_m_es|25": 1.0, |
|
"harness|belebele_fra_Latn|5": 0.0, |
|
"harness|arc_challenge_m_fr|25": 1.0, |
|
"harness|belebele_spa_Latn|5": 0.0, |
|
"harness|mmlu_m_it|5": "Yaml", |
|
"harness|arc_challenge|25": 1.0, |
|
"harness|hendrycksTest|5": "N/A", |
|
"harness|hendrycksTest-humanities|5": "N/A", |
|
"harness|hendrycksTest-formal_logic|5": "N/A", |
|
"harness|hendrycksTest-high_school_european_history|5": "N/A", |
|
"harness|hendrycksTest-high_school_us_history|5": "N/A", |
|
"harness|hendrycksTest-high_school_world_history|5": "N/A", |
|
"harness|hendrycksTest-international_law|5": "N/A", |
|
"harness|hendrycksTest-jurisprudence|5": "N/A", |
|
"harness|hendrycksTest-logical_fallacies|5": "N/A", |
|
"harness|hendrycksTest-moral_disputes|5": "N/A", |
|
"harness|hendrycksTest-moral_scenarios|5": "N/A", |
|
"harness|hendrycksTest-philosophy|5": "N/A", |
|
"harness|hendrycksTest-prehistory|5": "N/A", |
|
"harness|hendrycksTest-professional_law|5": "N/A", |
|
"harness|hendrycksTest-world_religions|5": "N/A", |
|
"harness|hendrycksTest-other|5": "N/A", |
|
"harness|hendrycksTest-business_ethics|5": "N/A", |
|
"harness|hendrycksTest-clinical_knowledge|5": "N/A", |
|
"harness|hendrycksTest-college_medicine|5": "N/A", |
|
"harness|hendrycksTest-global_facts|5": "N/A", |
|
"harness|hendrycksTest-human_aging|5": "N/A", |
|
"harness|hendrycksTest-management|5": "N/A", |
|
"harness|hendrycksTest-marketing|5": "N/A", |
|
"harness|hendrycksTest-medical_genetics|5": "N/A", |
|
"harness|hendrycksTest-miscellaneous|5": "N/A", |
|
"harness|hendrycksTest-nutrition|5": "N/A", |
|
"harness|hendrycksTest-professional_accounting|5": "N/A", |
|
"harness|hendrycksTest-professional_medicine|5": "N/A", |
|
"harness|hendrycksTest-virology|5": "N/A", |
|
"harness|hendrycksTest-social_sciences|5": "N/A", |
|
"harness|hendrycksTest-econometrics|5": "N/A", |
|
"harness|hendrycksTest-high_school_geography|5": "N/A", |
|
"harness|hendrycksTest-high_school_government_and_politics|5": "N/A", |
|
"harness|hendrycksTest-high_school_macroeconomics|5": "N/A", |
|
"harness|hendrycksTest-high_school_microeconomics|5": "N/A", |
|
"harness|hendrycksTest-high_school_psychology|5": "N/A", |
|
"harness|hendrycksTest-human_sexuality|5": "N/A", |
|
"harness|hendrycksTest-professional_psychology|5": "N/A", |
|
"harness|hendrycksTest-public_relations|5": "N/A", |
|
"harness|hendrycksTest-security_studies|5": "N/A", |
|
"harness|hendrycksTest-sociology|5": "N/A", |
|
"harness|hendrycksTest-us_foreign_policy|5": "N/A", |
|
"harness|hendrycksTest-stem|5": "N/A", |
|
"harness|hendrycksTest-abstract_algebra|5": "N/A", |
|
"harness|hendrycksTest-anatomy|5": "N/A", |
|
"harness|hendrycksTest-astronomy|5": "N/A", |
|
"harness|hendrycksTest-college_biology|5": "N/A", |
|
"harness|hendrycksTest-college_chemistry|5": "N/A", |
|
"harness|hendrycksTest-college_computer_science|5": "N/A", |
|
"harness|hendrycksTest-college_mathematics|5": "N/A", |
|
"harness|hendrycksTest-college_physics|5": "N/A", |
|
"harness|hendrycksTest-computer_security|5": "N/A", |
|
"harness|hendrycksTest-conceptual_physics|5": "N/A", |
|
"harness|hendrycksTest-electrical_engineering|5": "N/A", |
|
"harness|hendrycksTest-elementary_mathematics|5": "N/A", |
|
"harness|hendrycksTest-high_school_biology|5": "N/A", |
|
"harness|hendrycksTest-high_school_chemistry|5": "N/A", |
|
"harness|hendrycksTest-high_school_computer_science|5": "N/A", |
|
"harness|hendrycksTest-high_school_mathematics|5": "N/A", |
|
"harness|hendrycksTest-high_school_physics|5": "N/A", |
|
"harness|hendrycksTest-high_school_statistics|5": "N/A", |
|
"harness|hendrycksTest-machine_learning|5": "N/A", |
|
"harness|hellaswag|10": 1.0, |
|
"harness|hellaswag_es|10": 1.0, |
|
"harness|hellaswag_de|10": 1.0, |
|
"harness|hellaswag_it|10": 1.0, |
|
"harness|truthfulqa_mc2_m_fr|0": "Yaml", |
|
"harness|truthfulqa_mc2|0": 2.0, |
|
"harness|hellaswag_fr|10": 1.0, |
|
"harness|belebele_deu_Latn|5": 0.0 |
|
} |
|
} |