{ "config_general": { "model_name": "Qwen/Qwen2-72B-Instruct", "model_dtype": "float16", "model_size": 0 }, "results": { "harness-c_arc_challenge": { "acc_norm": 66.47, "acc_stderr": 0, "c_arc_challenge_25shot_acc": 59.81, "c_arc_challenge_25shot_acc_norm": 66.47 }, "harness-c_gsm8k": { "acc": 78.7, "acc_stderr": 0, "c_gsm8k_5shot_acc": 78.7 }, "harness-c_hellaswag": { "acc_norm": 73.93, "acc_stderr": 0, "c_hellaswag_10shot_acc": 55.41, "c_hellaswag_10shot_acc_norm": 73.93 }, "harness-c-sem-v2": { "acc": 94.115, "acc_stderr": 0, "c_sem_v2-LLSRC_5shot_acc": 95.11, "c_sem_v2-SLPWC_5shot_acc": 93.0, "c_sem_v2-SLRFC_5shot_acc": 97.99, "c_sem_v2-SLSRC_5shot_acc": 90.36, "c_sem_v2-LLSRC_5shot_acc_norm": 95.11, "c_sem_v2-SLPWC_5shot_acc_norm": 93.0, "c_sem_v2-SLRFC_5shot_acc_norm": 97.99, "c_sem_v2-SLSRC_5shot_acc_norm": 90.36 }, "harness-c_truthfulqa_mc": { "mc2": 62.52, "acc_stderr": 0, "c_truthfulqa_mc_0shot_mc1": 42.47, "c_truthfulqa_mc_0shot_mc2": 62.52 }, "harness-c_winogrande": { "acc": 70.01, "acc_stderr": 0, "c_winogrande_0shot_acc": 70.01 }, "CLCC-H": { "acc": 0.8121, "acc_stderr": 0 }, "harness-cmmlu": { "acc_norm": 78.4, "acc_stderr": 0, "cmmlu_fullavg_5shot_acc": 78.4, "cmmlu-virology_5shot_acc": 54.82, "cmmlu-sociology_5shot_acc": 89.55, "cmmlu-world_religions_5shot_acc": 83.04, "cmmlu-public_relations_5shot_acc": 70.0, "cmmlu-security_studies_5shot_acc": 80.82, "cmmlu-us_foreign_policy_5shot_acc": 90.0, "cmmlu-professional_psychology_5shot_acc": 80.56, "cmmlu_fullavg_5shot_acc_norm": 78.4, "cmmlu-virology_5shot_acc_norm": 54.82, "cmmlu-sociology_5shot_acc_norm": 89.55, "cmmlu-world_religions_5shot_acc_norm": 83.04, "cmmlu-public_relations_5shot_acc_norm": 70.0, "cmmlu-security_studies_5shot_acc_norm": 80.82, "cmmlu-us_foreign_policy_5shot_acc_norm": 90.0, "cmmlu-professional_psychology_5shot_acc_norm": 80.56 } }, "versions": {}, "config_tasks": {}, "summary_tasks": {}, "summary_general": {} }