{
    "config_general": {
        "model_name": "TIGER-Lab/MAmmoTH2-8x7B-Plus",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "harness-c_arc_challenge": {
            "acc_norm": 57.42,
            "acc_stderr": 0,
            "c_arc_challenge_25shot_acc": 51.71,
            "c_arc_challenge_25shot_acc_norm": 57.42
        },
        "harness-c_gsm8k": {
            "acc": 56.56,
            "acc_stderr": 0,
            "c_gsm8k_5shot_acc": 56.56
        },
        "harness-c_hellaswag": {
            "acc_norm": 61.38,
            "acc_stderr": 0,
            "c_hellaswag_10shot_acc": 45.77,
            "c_hellaswag_10shot_acc_norm": 61.38
        },
        "harness-c-sem-v2": {
            "acc": 77.93,
            "acc_stderr": 0,
            "c_sem_v2-LLSRC_5shot_acc": 80.58,
            "c_sem_v2-SLPWC_5shot_acc": 71.86,
            "c_sem_v2-SLRFC_5shot_acc": 78.42,
            "c_sem_v2-SLSRC_5shot_acc": 80.86,
            "c_sem_v2-LLSRC_5shot_acc_norm": 80.58,
            "c_sem_v2-SLPWC_5shot_acc_norm": 71.86,
            "c_sem_v2-SLRFC_5shot_acc_norm": 78.42,
            "c_sem_v2-SLSRC_5shot_acc_norm": 80.86
        },
        "harness-c_truthfulqa_mc": {
            "mc2": 58.49,
            "acc_stderr": 0,
            "c_truthfulqa_mc_0shot_mc1": 36.35,
            "c_truthfulqa_mc_0shot_mc2": 58.49
        },
        "harness-c_winogrande": {
            "acc": 62.19,
            "acc_stderr": 0,
            "c_winogrande_0shot_acc": 62.19
        },
        "harness-cmmlu": {
            "acc_norm": 56.53,
            "acc_stderr": 0,
            "cmmlu_fullavg_5shot_acc": 56.53,
            "cmmlu-virology_5shot_acc": 42.77,
            "cmmlu-nutrition_5shot_acc": 63.4,
            "cmmlu-sociology_5shot_acc": 75.12,
            "cmmlu-philosophy_5shot_acc": 59.49,
            "cmmlu-prehistory_5shot_acc": 55.86,
            "cmmlu-miscellaneous_5shot_acc": 62.45,
            "cmmlu-moral_disputes_5shot_acc": 59.54,
            "cmmlu-moral_scenarios_5shot_acc": 31.62,
            "cmmlu-world_religions_5shot_acc": 63.16,
            "cmmlu-professional_law_5shot_acc": 42.31,
            "cmmlu-public_relations_5shot_acc": 63.64,
            "cmmlu-security_studies_5shot_acc": 67.76,
            "cmmlu-us_foreign_policy_5shot_acc": 71.0,
            "cmmlu-professional_medicine_5shot_acc": 47.79,
            "cmmlu-professional_accounting_5shot_acc": 43.26,
            "cmmlu-professional_psychology_5shot_acc": 55.39,
            "cmmlu_fullavg_5shot_acc_norm": 56.53,
            "cmmlu-virology_5shot_acc_norm": 42.77,
            "cmmlu-nutrition_5shot_acc_norm": 63.4,
            "cmmlu-sociology_5shot_acc_norm": 75.12,
            "cmmlu-philosophy_5shot_acc_norm": 59.49,
            "cmmlu-prehistory_5shot_acc_norm": 55.86,
            "cmmlu-miscellaneous_5shot_acc_norm": 62.45,
            "cmmlu-moral_disputes_5shot_acc_norm": 59.54,
            "cmmlu-moral_scenarios_5shot_acc_norm": 31.62,
            "cmmlu-world_religions_5shot_acc_norm": 63.16,
            "cmmlu-professional_law_5shot_acc_norm": 42.31,
            "cmmlu-public_relations_5shot_acc_norm": 63.64,
            "cmmlu-security_studies_5shot_acc_norm": 67.76,
            "cmmlu-us_foreign_policy_5shot_acc_norm": 71.0,
            "cmmlu-professional_medicine_5shot_acc_norm": 47.79,
            "cmmlu-professional_accounting_5shot_acc_norm": 43.26,
            "cmmlu-professional_psychology_5shot_acc_norm": 55.39
        }
    },
    "versions": {},
    "config_tasks": {},
    "summary_tasks": {},
    "summary_general": {}
}