{
  "results": {
    "arc_challenge": {
      "acc": 0.20392491467576793,
      "acc_stderr": 0.01177426247870226,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.012653835621466646
    }
  },
  "versions": {
    "arc_challenge": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 25,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}