{
  "name": "heavy",
  "uuid": "c260eedc-dbb5-4ff4-afa9-d163e8d7585b",
  "model": "/net/nfs.cirrascale/allennlp/davidw/checkpoints/moe-release/OLMoE-7B-A1B/step1223842-tokens5100B",
  "creation_date": "2024_08_06-16_39_52",
  "eval_metrics": {
    "icl": {
      "mmlu_zeroshot": 0.43332746128241223,
      "hellaswag_zeroshot": 0.7701653242111206,
      "jeopardy": 0.5032301664352417,
      "triviaqa_sm_sub": 0.5586666464805603,
      "gsm8k_cot": 0.07429870963096619,
      "agi_eval_sat_math_cot": 0.06363636255264282,
      "aqua_cot": 0.020408162847161293,
      "svamp_cot": 0.33000001311302185,
      "bigbench_qa_wikidata": 0.6918458938598633,
      "arc_easy": 0.7765151262283325,
      "arc_challenge": 0.5375426411628723,
      "mmlu_fewshot": 0.5333352494135237,
      "bigbench_misconceptions": 0.6255707740783691,
      "copa": 0.7799999713897705,
      "siqa": 0.6934493184089661,
      "commonsense_qa": 0.6928746700286865,
      "piqa": 0.8204570412635803,
      "openbook_qa": 0.4480000138282776,
      "bigbench_novel_concepts": 0.65625,
      "bigbench_strange_stories": 0.6954023241996765,
      "bigbench_strategy_qa": 0.5696811079978943,
      "lambada_openai": 0.7327769994735718,
      "hellaswag": 0.7857996225357056,
      "winograd": 0.8461538553237915,
      "winogrande": 0.6898184418678284,
      "bigbench_conlang_translation": 0.0731707289814949,
      "bigbench_language_identification": 0.31049999594688416,
      "bigbench_conceptual_combinations": 0.5631067752838135,
      "bigbench_elementary_math_qa": 0.26944443583488464,
      "bigbench_dyck_languages": 0.2150000035762787,
      "agi_eval_lsat_ar": 0.2869565188884735,
      "bigbench_cs_algorithms": 0.47196969389915466,
      "bigbench_logical_deduction": 0.2460000067949295,
      "bigbench_operators": 0.3380952477455139,
      "bigbench_repeat_copy_logic": 0.1875,
      "simple_arithmetic_nospaces": 0.20100000500679016,
      "simple_arithmetic_withspaces": 0.22100000083446503,
      "math_qa": 0.27522629499435425,
      "logi_qa": 0.34562212228775024,
      "pubmed_qa_labeled": 0.5789999961853027,
      "squad": 0.5235572457313538,
      "agi_eval_lsat_rc": 0.46641790866851807,
      "agi_eval_lsat_lr": 0.37254902720451355,
      "coqa": 0.4366779327392578,
      "bigbench_understanding_fables": 0.4761904776096344,
      "boolq": 0.7318042516708374,
      "agi_eval_sat_en": 0.5291262269020081,
      "winogender_mc_female": 0.5,
      "winogender_mc_male": 0.6000000238418579,
      "enterprise_pii_classification": 0.5216494798660278,
      "bbq": 0.5055712298913435,
      "gpqa_main": 0.2254464328289032,
      "gpqa_diamond": 0.19696970283985138
    }
  },
  "missing tasks": "[]",
  "aggregated_task_categories_centered": {
    "commonsense reasoning": 0.46523631517512387,
    "language understanding": 0.4930994285627118,
    "reading comprehension": 0.3874464948710642,
    "safety": 0.06361036679961463,
    "symbolic problem solving": 0.1715689478790787,
    "world knowledge": 0.36090664066069306
  },
  "aggregated_centered_results": 0.3245905660540145,
  "aggregated_results": 0.4716746728993969,
  "rw_small": 0.7143076260884603,
  "rw_small_centered": 0.5021371105958147,
  "95%_CI_above": 0.5717007633347017,
  "95%_CI_above_centered": 0.4290739257829261,
  "99%_CI_above": 0.581190875561341,
  "99%_CI_above_centered": 0.4683558738011154,
  "low_variance_datasets": 0.571692757173018,
  "low_variance_datasets_centered": 0.4723345583847347
}