{
  "results": {
    "logiqa_base": {
      "acc,none": 0.2987220447284345,
      "acc_stderr,none": 0.01830790800596066,
      "alias": "logiqa_base"
    },
    "lsat-lr_base": {
      "acc,none": 0.2803921568627451,
      "acc_stderr,none": 0.01991003317147411,
      "alias": "lsat-lr_base"
    }
  },
  "configs": {
    "logiqa_base": {
      "task": "logiqa_base",
      "group": "logikon-bench",
      "dataset_path": "logikon/logikon-bench",
      "dataset_name": "logiqa",
      "test_split": "test",
      "doc_to_text": "<function doc_to_text at 0x7f18885f1c60>",
      "doc_to_target": "{{answer}}",
      "doc_to_choice": "{{options}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": false,
      "metadata": {
        "version": 0.0
      }
    },
    "lsat-lr_base": {
      "task": "lsat-lr_base",
      "group": "logikon-bench",
      "dataset_path": "logikon/logikon-bench",
      "dataset_name": "lsat-lr",
      "test_split": "test",
      "doc_to_text": "<function doc_to_text at 0x7f188879fd00>",
      "doc_to_target": "{{answer}}",
      "doc_to_choice": "{{options}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": false,
      "metadata": {
        "version": 0.0
      }
    }
  },
  "versions": {
    "logiqa_base": 0.0,
    "lsat-lr_base": 0.0
  },
  "n-shot": {
    "logiqa_base": 0,
    "lsat-lr_base": 0
  },
  "config": {
    "model": "vllm",
    "model_args": "pretrained=mistralai/Mistral-7B-Instruct-v0.2,revision=main,dtype=auto,gpu_memory_utilization=0.9,trust_remote_code=true,max_length=4096",
    "batch_size": "auto",
    "batch_sizes": [],
    "device": null,
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  },
  "git_hash": "92b0637"
}