results/ValiantLabs/Llama3-70B-ShiningValiant2/results_2024-07-01T20-11-23.122954.json
{
"config_general": {
"model_name": "ValiantLabs/Llama3-70B-ShiningValiant2",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"harness-c_arc_challenge": {
"acc_norm": 57.85,
"acc_stderr": 0,
"c_arc_challenge_25shot_acc": 52.9,
"c_arc_challenge_25shot_acc_norm": 57.85
},
"harness-c_gsm8k": {
"acc": 62.62,
"acc_stderr": 0,
"c_gsm8k_5shot_acc": 62.62
},
"harness-c_hellaswag": {
"acc_norm": 62.59,
"acc_stderr": 0,
"c_hellaswag_10shot_acc": 45.95,
"c_hellaswag_10shot_acc_norm": 62.59
},
"harness-c-sem-v2": {
"acc": 86.0375,
"acc_stderr": 0,
"c_sem_v2-LLSRC_5shot_acc": 90.07,
"c_sem_v2-SLPWC_5shot_acc": 81.71,
"c_sem_v2-SLRFC_5shot_acc": 87.48,
"c_sem_v2-SLSRC_5shot_acc": 84.89,
"c_sem_v2-LLSRC_5shot_acc_norm": 90.07,
"c_sem_v2-SLPWC_5shot_acc_norm": 81.71,
"c_sem_v2-SLRFC_5shot_acc_norm": 87.48,
"c_sem_v2-SLSRC_5shot_acc_norm": 84.89
},
"harness-c_truthfulqa_mc": {
"mc2": 53.61,
"acc_stderr": 0,
"c_truthfulqa_mc_0shot_mc1": 30.11,
"c_truthfulqa_mc_0shot_mc2": 53.61
},
"harness-c_winogrande": {
"acc": 65.67,
"acc_stderr": 0,
"c_winogrande_0shot_acc": 65.67
},
"CLCC-H": {
"acc": 0,
"acc_stderr": 0
},
"harness-cmmlu": {
"acc_norm": 68.74,
"acc_stderr": 0,
"cmmlu_fullavg_5shot_acc": 68.74,
"cmmlu-virology_5shot_acc": 53.01,
"cmmlu-nutrition_5shot_acc": 81.37,
"cmmlu-sociology_5shot_acc": 84.08,
"cmmlu-philosophy_5shot_acc": 67.2,
"cmmlu-prehistory_5shot_acc": 75.62,
"cmmlu-miscellaneous_5shot_acc": 76.76,
"cmmlu-moral_disputes_5shot_acc": 66.18,
"cmmlu-moral_scenarios_5shot_acc": 44.8,
"cmmlu-world_religions_5shot_acc": 78.95,
"cmmlu-professional_law_5shot_acc": 50.33,
"cmmlu-public_relations_5shot_acc": 67.27,
"cmmlu-security_studies_5shot_acc": 73.88,
"cmmlu-us_foreign_policy_5shot_acc": 85.0,
"cmmlu-professional_medicine_5shot_acc": 75.37,
"cmmlu-professional_accounting_5shot_acc": 52.48,
"cmmlu-professional_psychology_5shot_acc": 67.48,
"cmmlu_fullavg_5shot_acc_norm": 68.74,
"cmmlu-virology_5shot_acc_norm": 53.01,
"cmmlu-nutrition_5shot_acc_norm": 81.37,
"cmmlu-sociology_5shot_acc_norm": 84.08,
"cmmlu-philosophy_5shot_acc_norm": 67.2,
"cmmlu-prehistory_5shot_acc_norm": 75.62,
"cmmlu-miscellaneous_5shot_acc_norm": 76.76,
"cmmlu-moral_disputes_5shot_acc_norm": 66.18,
"cmmlu-moral_scenarios_5shot_acc_norm": 44.8,
"cmmlu-world_religions_5shot_acc_norm": 78.95,
"cmmlu-professional_law_5shot_acc_norm": 50.33,
"cmmlu-public_relations_5shot_acc_norm": 67.27,
"cmmlu-security_studies_5shot_acc_norm": 73.88,
"cmmlu-us_foreign_policy_5shot_acc_norm": 85.0,
"cmmlu-professional_medicine_5shot_acc_norm": 75.37,
"cmmlu-professional_accounting_5shot_acc_norm": 52.48,
"cmmlu-professional_psychology_5shot_acc_norm": 67.48
}
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
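
Note on the aggregates: the "acc" of 86.0375 reported for harness-c-sem-v2 is the unweighted mean of its four 5-shot sub-scores (90.07, 81.71, 87.48, 84.89). The short Python sketch below re-derives that value from this file; the local file path is an assumption, so point it at wherever a copy of the JSON above is saved.

import json

# Minimal sketch: re-derive the harness-c-sem-v2 aggregate from its four
# 5-shot sub-scores in this results file.
# The path is an assumption; adjust it to a local copy of the JSON above.
path = "results_2024-07-01T20-11-23.122954.json"
with open(path, encoding="utf-8") as f:
    sem = json.load(f)["results"]["harness-c-sem-v2"]

# Keep only the per-subtask accuracies (keys ending in "_5shot_acc"),
# excluding the "_acc_norm" variants and the aggregate fields.
subscores = [v for k, v in sem.items() if k.endswith("_5shot_acc")]
mean = sum(subscores) / len(subscores)
print(mean, sem["acc"])  # both ~86.0375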