results/failspy/Smaug-Llama-3-70B-Instruct-abliterated-v3/results_2024-06-05T22-02-55.033016.json
{
  "config_general": {
    "model_name": "failspy/Smaug-Llama-3-70B-Instruct-abliterated-v3",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "harness-c_arc_challenge": {
      "acc_norm": 57.85,
      "acc_stderr": 0,
      "c_arc_challenge_25shot_acc": 53.33,
      "c_arc_challenge_25shot_acc_norm": 57.85
    },
    "harness-c_gsm8k": {
      "acc": 68.92,
      "acc_stderr": 0,
      "c_gsm8k_5shot_acc": 68.92
    },
    "harness-c_hellaswag": {
      "acc_norm": 63.52,
      "acc_stderr": 0,
      "c_hellaswag_10shot_acc": 46.54,
      "c_hellaswag_10shot_acc_norm": 63.52
    },
    "harness-c-sem-v2": {
      "acc": 86.575,
      "acc_stderr": 0,
      "c_sem_v2-LLSRC_5shot_acc": 90.36,
      "c_sem_v2-SLPWC_5shot_acc": 84.29,
      "c_sem_v2-SLRFC_5shot_acc": 87.77,
      "c_sem_v2-SLSRC_5shot_acc": 83.88,
      "c_sem_v2-LLSRC_5shot_acc_norm": 90.36,
      "c_sem_v2-SLPWC_5shot_acc_norm": 84.29,
      "c_sem_v2-SLRFC_5shot_acc_norm": 87.77,
      "c_sem_v2-SLSRC_5shot_acc_norm": 83.88
    },
    "harness-c_truthfulqa_mc": {
      "mc2": 54.21,
      "acc_stderr": 0,
      "c_truthfulqa_mc_0shot_mc1": 33.66,
      "c_truthfulqa_mc_0shot_mc2": 54.21
    },
    "harness-c_winogrande": {
      "acc": 62.04,
      "acc_stderr": 0,
      "c_winogrande_0shot_acc": 62.04
    },
    "harness-cmmlu": {
      "acc_norm": 72.08,
      "acc_stderr": 0,
      "cmmlu_fullavg_5shot_acc": 72.08,
      "cmmlu-virology_5shot_acc": 55.42,
      "cmmlu-sociology_5shot_acc": 85.07,
      "cmmlu-world_religions_5shot_acc": 80.7,
      "cmmlu-professional_law_5shot_acc": 50.26,
      "cmmlu-public_relations_5shot_acc": 69.09,
      "cmmlu-security_studies_5shot_acc": 77.14,
      "cmmlu-us_foreign_policy_5shot_acc": 83.0,
      "cmmlu-professional_medicine_5shot_acc": 76.47,
      "cmmlu-professional_psychology_5shot_acc": 71.57,
      "cmmlu_fullavg_5shot_acc_norm": 72.08,
      "cmmlu-virology_5shot_acc_norm": 55.42,
      "cmmlu-sociology_5shot_acc_norm": 85.07,
      "cmmlu-world_religions_5shot_acc_norm": 80.7,
      "cmmlu-professional_law_5shot_acc_norm": 50.26,
      "cmmlu-public_relations_5shot_acc_norm": 69.09,
      "cmmlu-security_studies_5shot_acc_norm": 77.14,
      "cmmlu-us_foreign_policy_5shot_acc_norm": 83.0,
      "cmmlu-professional_medicine_5shot_acc_norm": 76.47,
      "cmmlu-professional_psychology_5shot_acc_norm": 71.57
    }
  },
  "versions": {},
  "config_tasks": {},
  "summary_tasks": {},
  "summary_general": {}
}
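
For reference, a minimal Python sketch of one way to load and inspect this results file. The relative path, the choice of headline metric per task group, and the assumption that the group-level "acc" for harness-c-sem-v2 is the plain mean of its four 5-shot subtask accuracies (90.36, 84.29, 87.77, 83.88 -> 86.575) are inferences from the numbers above, not something the file itself states.

```python
import json
import math
from statistics import mean

# Path as it appears in this repo; adjust if the file lives elsewhere (assumption).
RESULTS_PATH = (
    "results/failspy/Smaug-Llama-3-70B-Instruct-abliterated-v3/"
    "results_2024-06-05T22-02-55.033016.json"
)

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Print one headline score per task group, preferring acc, then acc_norm, then mc2.
for task, metrics in results.items():
    headline = metrics.get("acc", metrics.get("acc_norm", metrics.get("mc2")))
    print(f"{task}: {headline}")

# Sanity check (assumed aggregation): the c-sem-v2 group score matches the
# unweighted mean of its 5-shot subtask accuracies.
sem = results["harness-c-sem-v2"]
subtask_scores = [v for k, v in sem.items() if k.endswith("_5shot_acc")]
assert math.isclose(mean(subtask_scores), sem["acc"], abs_tol=1e-6)
```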