sha (string, 40 chars) | text (string, 0–13.4M chars) | id (string, 2–117 chars) | tags (sequence) | created_at (string, 25 chars) | metadata (string, 2–31.7M chars) | last_modified (string, 25 chars)
---|---|---|---|---|---|---|
53af24510b72b1286e852c97b0065e342a1578f3 | LightFury9/CulturaX_tenglish_split3 | ["region:us"] | 2024-02-16T16:57:21+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "translit", "dtype": "string"}], "splits": [{"name": "split3", "num_bytes": 664556992, "num_examples": 62500}], "download_size": 306477565, "dataset_size": 664556992}, "configs": [{"config_name": "default", "data_files": [{"split": "split3", "path": "data/split3-*"}]}]} | 2024-02-16T16:57:37+00:00 |
|
2add737eea0370e3e599b46eccba0e4291e638fa | maghwa/OpenHermes-2-AR-10K-24-660k-670k | ["region:us"] | 2024-02-16T16:59:36+00:00 | {"dataset_info": {"features": [{"name": "skip_prompt_formatting", "dtype": "null"}, {"name": "model_name", "dtype": "null"}, {"name": "model", "dtype": "null"}, {"name": "conversations", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "id", "dtype": "null"}, {"name": "avatarUrl", "dtype": "null"}, {"name": "idx", "dtype": "null"}, {"name": "language", "dtype": "null"}, {"name": "hash", "dtype": "null"}, {"name": "views", "dtype": "float64"}, {"name": "topic", "dtype": "null"}, {"name": "title", "dtype": "null"}, {"name": "category", "dtype": "null"}, {"name": "custom_instruction", "dtype": "null"}, {"name": "system_prompt", "dtype": "null"}], "splits": [{"name": "train", "num_bytes": 25145409, "num_examples": 10001}], "download_size": 11380688, "dataset_size": 25145409}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T16:59:38+00:00 |
|
7531b4edf519184084f2c888ad298d31815de251 | lab42/cov-json-vqa-100 | ["region:us"] | 2024-02-16T17:06:46+00:00 | {"dataset_info": {"features": [{"name": "image_0", "dtype": "image"}, {"name": "image_1", "dtype": "image"}, {"name": "image_2", "dtype": "image"}, {"name": "images_rest", "sequence": "image"}, {"name": "mask_0", "dtype": "image"}, {"name": "mask_1", "dtype": "image"}, {"name": "mask_2", "dtype": "image"}, {"name": "masks_rest", "sequence": "image"}, {"name": "conversations", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "dataset", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "n_images", "dtype": "int32"}, {"name": "n_masks", "dtype": "int32"}, {"name": "n_conversations", "dtype": "int32"}], "splits": [{"name": "train", "num_bytes": 15308488.0, "num_examples": 88}, {"name": "validation", "num_bytes": 2194113.0, "num_examples": 12}], "download_size": 34755376, "dataset_size": 17502601.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}]} | 2024-02-16T17:13:19+00:00 |
|
1402cb13d7bda6a3dad14642643328ee998646c3 | argilla/OpenHermes-2.5-dpo-idx | ["region:us"] | 2024-02-16T17:13:16+00:00 | {"dataset_info": {"features": [{"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}, {"name": "weight", "dtype": "float64"}]}, {"name": "input", "dtype": "string"}, {"name": "generation_model", "sequence": "string"}, {"name": "generation_prompt", "sequence": "string"}, {"name": "raw_generation_responses", "sequence": "string"}, {"name": "generations", "sequence": "string"}, {"name": "row_index", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 2394285076, "num_examples": 454337}], "download_size": 1103886856, "dataset_size": 2394285076}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T17:39:08+00:00 |
|
fa8b089fb05bb116bf913074c457443e77ebb80e | avr4/trainTaxi | ["region:us"] | 2024-02-16T17:14:45+00:00 | {} | 2024-02-16T17:14:45+00:00 |
|
ae834313b5eec781ea54dd2b29b9c786fc3c2f74 | techythomas/Sc | ["region:us"] | 2024-02-16T17:34:33+00:00 | {} | 2024-02-16T17:34:33+00:00 |
|
03151a93ce2a574c0195f0601afd1833963b5ccd | AsphyXIA/Baarat-Hin-Summarization | ["region:us"] | 2024-02-16T17:34:48+00:00 | {"dataset_info": {"features": [{"name": "headline", "dtype": "string"}, {"name": "article", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1278181193, "num_examples": 179835}], "download_size": 291675884, "dataset_size": 1278181193}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T17:35:38+00:00 |
|
8ef41083695aeea7d3cf7256b92ddca18e52bad0 | gvecchio/TestDataset | ["region:us"] | 2024-02-16T17:34:59+00:00 | {"dataset_info": {"features": [{"name": "name", "dtype": "string"}, {"name": "category", "dtype": {"class_label": {"names": {"0": "blends", "1": "ceramic", "2": "deschaintre", "3": "concrete", "4": "fabric", "5": "ground", "6": "leather", "7": "marble", "8": "metal", "9": "misc", "10": "plaster", "11": "plastic", "12": "stone", "13": "terracotta", "14": "wood"}}}}, {"name": "license", "dtype": "string"}, {"name": "basecolor", "dtype": "image"}, {"name": "diffuse", "dtype": "image"}, {"name": "displacement", "dtype": "image"}, {"name": "height", "dtype": "image"}, {"name": "metallic", "dtype": "image"}, {"name": "normal", "dtype": "image"}, {"name": "opacity", "dtype": "null"}, {"name": "roughness", "dtype": "image"}, {"name": "specular", "dtype": "image"}], "splits": [{"name": "test", "num_bytes": 919634639.0, "num_examples": 10}], "download_size": 911017856, "dataset_size": 919634639.0}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T17:42:18+00:00 |
|
935306ba8c08e4cc46c14b6546d5271f18749e92 |
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | suguroglu/crowdsourced-calculator-demo | ["region:us"] | 2024-02-16T17:35:42+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data.csv"}]}]} | 2024-02-16T17:38:15+00:00 |
6b0ca2f5f36b65a11b190b40c26a486856cc87cc | lucasjca/pokemon | ["region:us"] | 2024-02-16T17:36:27+00:00 | {} | 2024-02-16T17:36:27+00:00 |
|
deb222597e993268303e49a731c0f716b75aec60 | gwenxin/ai2d_diagram | ["license:cc-by-sa-4.0", "region:us"] | 2024-02-16T17:40:53+00:00 | {"license": "cc-by-sa-4.0"} | 2024-02-16T17:40:53+00:00 |
|
e87f2fc32886c1de2bb48c665961916a5ed9c5df |
# Dataset Card for Evaluation run of sethuiyer/Aika-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [sethuiyer/Aika-7B](https://huggingface.co/sethuiyer/Aika-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_sethuiyer__Aika-7B",
"harness_winogrande_5",
split="train")
```
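Since there are 63 task-specific configurations, it can help to enumerate them before picking one. A minimal sketch, assuming the `datasets` library's `get_dataset_config_names` helper and the `latest` split names that appear in the repo metadata below:
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_sethuiyer__Aika-7B"

# One config per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(len(configs), "configs, e.g.", configs[:3])

# Load any one of them; "latest" (per the config metadata below) points
# at the most recent run for that task.
details = load_dataset(repo, configs[0], split="latest")
```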
## Latest results
These are the [latest results from run 2024-02-16T17:42:59.851366](https://huggingface.co/datasets/open-llm-leaderboard/details_sethuiyer__Aika-7B/blob/main/results_2024-02-16T17-42-59.851366.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5403534855873607,
"acc_stderr": 0.03412368335032635,
"acc_norm": 0.5457531977609761,
"acc_norm_stderr": 0.034853423231234165,
"mc1": 0.36964504283965727,
"mc1_stderr": 0.016898180706973888,
"mc2": 0.512198702575838,
"mc2_stderr": 0.015478157824850066
},
"harness|arc:challenge|25": {
"acc": 0.5947098976109215,
"acc_stderr": 0.01434686906022932,
"acc_norm": 0.6535836177474402,
"acc_norm_stderr": 0.013905011180063235
},
"harness|hellaswag|10": {
"acc": 0.6006771559450309,
"acc_stderr": 0.004887583074180845,
"acc_norm": 0.8148775144393547,
"acc_norm_stderr": 0.0038760312505449843
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750574,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750574
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6118421052631579,
"acc_stderr": 0.03965842097512744,
"acc_norm": 0.6118421052631579,
"acc_norm_stderr": 0.03965842097512744
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6226415094339622,
"acc_stderr": 0.029832808114796005,
"acc_norm": 0.6226415094339622,
"acc_norm_stderr": 0.029832808114796005
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.5694444444444444,
"acc_stderr": 0.04140685639111503,
"acc_norm": 0.5694444444444444,
"acc_norm_stderr": 0.04140685639111503
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5895953757225434,
"acc_stderr": 0.03750757044895537,
"acc_norm": 0.5895953757225434,
"acc_norm_stderr": 0.03750757044895537
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201942,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201942
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.68,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4808510638297872,
"acc_stderr": 0.032662042990646796,
"acc_norm": 0.4808510638297872,
"acc_norm_stderr": 0.032662042990646796
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.39473684210526316,
"acc_stderr": 0.045981880578165414,
"acc_norm": 0.39473684210526316,
"acc_norm_stderr": 0.045981880578165414
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.025197101074246483,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.025197101074246483
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.5870967741935483,
"acc_stderr": 0.028009138125400387,
"acc_norm": 0.5870967741935483,
"acc_norm_stderr": 0.028009138125400387
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.35467980295566504,
"acc_stderr": 0.03366124489051449,
"acc_norm": 0.35467980295566504,
"acc_norm_stderr": 0.03366124489051449
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.49696969696969695,
"acc_stderr": 0.03904272341431857,
"acc_norm": 0.49696969696969695,
"acc_norm_stderr": 0.03904272341431857
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7121212121212122,
"acc_stderr": 0.03225883512300992,
"acc_norm": 0.7121212121212122,
"acc_norm_stderr": 0.03225883512300992
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8031088082901554,
"acc_stderr": 0.02869787397186067,
"acc_norm": 0.8031088082901554,
"acc_norm_stderr": 0.02869787397186067
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.541025641025641,
"acc_stderr": 0.025265525491284295,
"acc_norm": 0.541025641025641,
"acc_norm_stderr": 0.025265525491284295
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.026962424325073835,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.026962424325073835
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5168067226890757,
"acc_stderr": 0.03246013680375308,
"acc_norm": 0.5168067226890757,
"acc_norm_stderr": 0.03246013680375308
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2847682119205298,
"acc_stderr": 0.03684881521389024,
"acc_norm": 0.2847682119205298,
"acc_norm_stderr": 0.03684881521389024
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7522935779816514,
"acc_stderr": 0.018508143602547825,
"acc_norm": 0.7522935779816514,
"acc_norm_stderr": 0.018508143602547825
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.36574074074074076,
"acc_stderr": 0.03284738857647206,
"acc_norm": 0.36574074074074076,
"acc_norm_stderr": 0.03284738857647206
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.553921568627451,
"acc_stderr": 0.034888454513049734,
"acc_norm": 0.553921568627451,
"acc_norm_stderr": 0.034888454513049734
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.6497890295358649,
"acc_stderr": 0.031052391937584346,
"acc_norm": 0.6497890295358649,
"acc_norm_stderr": 0.031052391937584346
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6188340807174888,
"acc_stderr": 0.03259625118416827,
"acc_norm": 0.6188340807174888,
"acc_norm_stderr": 0.03259625118416827
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6259541984732825,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.6259541984732825,
"acc_norm_stderr": 0.042438692422305246
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070416
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6759259259259259,
"acc_stderr": 0.04524596007030048,
"acc_norm": 0.6759259259259259,
"acc_norm_stderr": 0.04524596007030048
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6380368098159509,
"acc_stderr": 0.037757007291414416,
"acc_norm": 0.6380368098159509,
"acc_norm_stderr": 0.037757007291414416
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.49107142857142855,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.49107142857142855,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7087378640776699,
"acc_stderr": 0.044986763205729224,
"acc_norm": 0.7087378640776699,
"acc_norm_stderr": 0.044986763205729224
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8376068376068376,
"acc_stderr": 0.02416161812798774,
"acc_norm": 0.8376068376068376,
"acc_norm_stderr": 0.02416161812798774
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562429,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562429
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7381864623243933,
"acc_stderr": 0.015720838678445266,
"acc_norm": 0.7381864623243933,
"acc_norm_stderr": 0.015720838678445266
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5867052023121387,
"acc_stderr": 0.026511261369409244,
"acc_norm": 0.5867052023121387,
"acc_norm_stderr": 0.026511261369409244
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3541899441340782,
"acc_stderr": 0.015995644947299235,
"acc_norm": 0.3541899441340782,
"acc_norm_stderr": 0.015995644947299235
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5849673202614379,
"acc_stderr": 0.028213504177824096,
"acc_norm": 0.5849673202614379,
"acc_norm_stderr": 0.028213504177824096
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.5852090032154341,
"acc_stderr": 0.02798268045975957,
"acc_norm": 0.5852090032154341,
"acc_norm_stderr": 0.02798268045975957
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.5709876543209876,
"acc_stderr": 0.027538925613470863,
"acc_norm": 0.5709876543209876,
"acc_norm_stderr": 0.027538925613470863
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.41134751773049644,
"acc_stderr": 0.029354911159940978,
"acc_norm": 0.41134751773049644,
"acc_norm_stderr": 0.029354911159940978
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.3767926988265971,
"acc_stderr": 0.012376459593894402,
"acc_norm": 0.3767926988265971,
"acc_norm_stderr": 0.012376459593894402
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.45955882352941174,
"acc_stderr": 0.030273325077345762,
"acc_norm": 0.45955882352941174,
"acc_norm_stderr": 0.030273325077345762
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5637254901960784,
"acc_stderr": 0.020062874243539128,
"acc_norm": 0.5637254901960784,
"acc_norm_stderr": 0.020062874243539128
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6122448979591837,
"acc_stderr": 0.03119223072679566,
"acc_norm": 0.6122448979591837,
"acc_norm_stderr": 0.03119223072679566
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.5671641791044776,
"acc_stderr": 0.03503490923673282,
"acc_norm": 0.5671641791044776,
"acc_norm_stderr": 0.03503490923673282
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-virology|5": {
"acc": 0.45180722891566266,
"acc_stderr": 0.038743715565879536,
"acc_norm": 0.45180722891566266,
"acc_norm_stderr": 0.038743715565879536
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7426900584795322,
"acc_stderr": 0.03352799844161865,
"acc_norm": 0.7426900584795322,
"acc_norm_stderr": 0.03352799844161865
},
"harness|truthfulqa:mc|0": {
"mc1": 0.36964504283965727,
"mc1_stderr": 0.016898180706973888,
"mc2": 0.512198702575838,
"mc2_stderr": 0.015478157824850066
},
"harness|winogrande|5": {
"acc": 0.7774269928966061,
"acc_stderr": 0.01169093380971267
},
"harness|gsm8k|5": {
"acc": 0.2577710386656558,
"acc_stderr": 0.012048370213576598
}
}
```
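As a quick sanity check, the per-task entries above can be aggregated by hand. A minimal sketch, assuming the linked results file can be fetched with `huggingface_hub` and that the dict printed above sits either at the top level of the file or under a `"results"` key (the exact file layout is an assumption):
```python
import json

from huggingface_hub import hf_hub_download

# Fetch the results file linked above from the dataset repo.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_sethuiyer__Aika-7B",
    filename="results_2024-02-16T17-42-59.851366.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# Assumption: the per-task dict shown above lives under a "results" key;
# fall back to the top level if the file is that dict itself.
results = data.get("results", data)

# Mean accuracy over the hendrycksTest (MMLU) subtasks only. Note that
# results["all"]["acc"] averages every task with an "acc" field, so it
# differs from this MMLU-only mean.
mmlu = [v["acc"] for k, v in results.items() if k.startswith("harness|hendrycksTest-")]
print(f"{len(mmlu)} MMLU subtasks, mean acc = {sum(mmlu) / len(mmlu):.4f}")
```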
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_sethuiyer__Aika-7B | ["region:us"
] | 2024-02-16T17:45:18+00:00 | {"pretty_name": "Evaluation run of sethuiyer/Aika-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [sethuiyer/Aika-7B](https://huggingface.co/sethuiyer/Aika-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sethuiyer__Aika-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T17:42:59.851366](https://huggingface.co/datasets/open-llm-leaderboard/details_sethuiyer__Aika-7B/blob/main/results_2024-02-16T17-42-59.851366.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5403534855873607,\n \"acc_stderr\": 0.03412368335032635,\n \"acc_norm\": 0.5457531977609761,\n \"acc_norm_stderr\": 0.034853423231234165,\n \"mc1\": 0.36964504283965727,\n \"mc1_stderr\": 0.016898180706973888,\n \"mc2\": 0.512198702575838,\n \"mc2_stderr\": 0.015478157824850066\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5947098976109215,\n \"acc_stderr\": 0.01434686906022932,\n \"acc_norm\": 0.6535836177474402,\n \"acc_norm_stderr\": 0.013905011180063235\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6006771559450309,\n \"acc_stderr\": 0.004887583074180845,\n \"acc_norm\": 0.8148775144393547,\n \"acc_norm_stderr\": 0.0038760312505449843\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6118421052631579,\n \"acc_stderr\": 0.03965842097512744,\n \"acc_norm\": 0.6118421052631579,\n \"acc_norm_stderr\": 0.03965842097512744\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6226415094339622,\n \"acc_stderr\": 0.029832808114796005,\n \"acc_norm\": 0.6226415094339622,\n \"acc_norm_stderr\": 0.029832808114796005\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5694444444444444,\n \"acc_stderr\": 0.04140685639111503,\n \"acc_norm\": 0.5694444444444444,\n \"acc_norm_stderr\": 0.04140685639111503\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n 
\"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.5895953757225434,\n \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4808510638297872,\n \"acc_stderr\": 0.032662042990646796,\n \"acc_norm\": 0.4808510638297872,\n \"acc_norm_stderr\": 0.032662042990646796\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246483,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246483\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5870967741935483,\n \"acc_stderr\": 0.028009138125400387,\n \"acc_norm\": 0.5870967741935483,\n \"acc_norm_stderr\": 0.028009138125400387\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.35467980295566504,\n \"acc_stderr\": 0.03366124489051449,\n \"acc_norm\": 0.35467980295566504,\n \"acc_norm_stderr\": 0.03366124489051449\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.49696969696969695,\n \"acc_stderr\": 0.03904272341431857,\n \"acc_norm\": 0.49696969696969695,\n \"acc_norm_stderr\": 0.03904272341431857\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7121212121212122,\n \"acc_stderr\": 0.03225883512300992,\n \"acc_norm\": 0.7121212121212122,\n \"acc_norm_stderr\": 0.03225883512300992\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.02869787397186067,\n \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.02869787397186067\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.541025641025641,\n \"acc_stderr\": 0.025265525491284295,\n 
\"acc_norm\": 0.541025641025641,\n \"acc_norm_stderr\": 0.025265525491284295\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5168067226890757,\n \"acc_stderr\": 0.03246013680375308,\n \"acc_norm\": 0.5168067226890757,\n \"acc_norm_stderr\": 0.03246013680375308\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389024,\n \"acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7522935779816514,\n \"acc_stderr\": 0.018508143602547825,\n \"acc_norm\": 0.7522935779816514,\n \"acc_norm_stderr\": 0.018508143602547825\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.36574074074074076,\n \"acc_stderr\": 0.03284738857647206,\n \"acc_norm\": 0.36574074074074076,\n \"acc_norm_stderr\": 0.03284738857647206\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.553921568627451,\n \"acc_stderr\": 0.034888454513049734,\n \"acc_norm\": 0.553921568627451,\n \"acc_norm_stderr\": 0.034888454513049734\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6497890295358649,\n \"acc_stderr\": 0.031052391937584346,\n \"acc_norm\": 0.6497890295358649,\n \"acc_norm_stderr\": 0.031052391937584346\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n \"acc_stderr\": 0.03259625118416827,\n \"acc_norm\": 0.6188340807174888,\n \"acc_norm_stderr\": 0.03259625118416827\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.04524596007030048,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.04524596007030048\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6380368098159509,\n \"acc_stderr\": 0.037757007291414416,\n \"acc_norm\": 0.6380368098159509,\n \"acc_norm_stderr\": 0.037757007291414416\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.044986763205729224,\n \"acc_norm\": 0.7087378640776699,\n \"acc_norm_stderr\": 0.044986763205729224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.02416161812798774,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.02416161812798774\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562429,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562429\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7381864623243933,\n \"acc_stderr\": 0.015720838678445266,\n \"acc_norm\": 0.7381864623243933,\n \"acc_norm_stderr\": 
0.015720838678445266\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5867052023121387,\n \"acc_stderr\": 0.026511261369409244,\n \"acc_norm\": 0.5867052023121387,\n \"acc_norm_stderr\": 0.026511261369409244\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3541899441340782,\n \"acc_stderr\": 0.015995644947299235,\n \"acc_norm\": 0.3541899441340782,\n \"acc_norm_stderr\": 0.015995644947299235\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5849673202614379,\n \"acc_stderr\": 0.028213504177824096,\n \"acc_norm\": 0.5849673202614379,\n \"acc_norm_stderr\": 0.028213504177824096\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5852090032154341,\n \"acc_stderr\": 0.02798268045975957,\n \"acc_norm\": 0.5852090032154341,\n \"acc_norm_stderr\": 0.02798268045975957\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5709876543209876,\n \"acc_stderr\": 0.027538925613470863,\n \"acc_norm\": 0.5709876543209876,\n \"acc_norm_stderr\": 0.027538925613470863\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.41134751773049644,\n \"acc_stderr\": 0.029354911159940978,\n \"acc_norm\": 0.41134751773049644,\n \"acc_norm_stderr\": 0.029354911159940978\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3767926988265971,\n \"acc_stderr\": 0.012376459593894402,\n \"acc_norm\": 0.3767926988265971,\n \"acc_norm_stderr\": 0.012376459593894402\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.45955882352941174,\n \"acc_stderr\": 0.030273325077345762,\n \"acc_norm\": 0.45955882352941174,\n \"acc_norm_stderr\": 0.030273325077345762\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5637254901960784,\n \"acc_stderr\": 0.020062874243539128,\n \"acc_norm\": 0.5637254901960784,\n \"acc_norm_stderr\": 0.020062874243539128\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.03119223072679566,\n \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.03119223072679566\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5671641791044776,\n \"acc_stderr\": 0.03503490923673282,\n \"acc_norm\": 0.5671641791044776,\n \"acc_norm_stderr\": 0.03503490923673282\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.038743715565879536,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.038743715565879536\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7426900584795322,\n \"acc_stderr\": 0.03352799844161865,\n \"acc_norm\": 0.7426900584795322,\n \"acc_norm_stderr\": 0.03352799844161865\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36964504283965727,\n \"mc1_stderr\": 0.016898180706973888,\n \"mc2\": 0.512198702575838,\n \"mc2_stderr\": 0.015478157824850066\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7774269928966061,\n \"acc_stderr\": 0.01169093380971267\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2577710386656558,\n \"acc_stderr\": 0.012048370213576598\n }\n}\n```", "repo_url": "https://huggingface.co/sethuiyer/Aika-7B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|arc:challenge|25_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|gsm8k|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hellaswag|10_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T17-42-59.851366.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T17-42-59.851366.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T17-42-59.851366.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T17-42-59.851366.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T17-42-59.851366.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T17-42-59.851366.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["**/details_harness|winogrande|5_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T17-42-59.851366.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T17_42_59.851366", "path": ["results_2024-02-16T17-42-59.851366.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T17-42-59.851366.parquet"]}]}]} | 2024-02-16T17:45:43+00:00 |
0a11745da0cfc47d5632fcbb28b449c726c29781 | bertram-gilfoyle/CC-MAIN-2023-23 | [
"region:us"
] | 2024-02-16T17:45:54+00:00 | {} | 2024-02-17T01:44:27+00:00 |
|
3ad6eda2d686f5f3a01cc033a07107c4f88e866c | fattahharith/malaysian-journal-of-computing | [
"region:us"
] | 2024-02-16T17:46:58+00:00 | {} | 2024-02-16T17:51:11+00:00 |
|
0fb0a1b7e002851046c53d4a2c2109085e98bbf3 | felipesampaio2010/jennhi5usa | [
"license:openrail",
"region:us"
] | 2024-02-16T17:48:57+00:00 | {"license": "openrail"} | 2024-02-16T17:49:46+00:00 |
|
3745debbc11f34ee2626f71c5aadec010cc94a23 | gzguevara/test2 | [
"region:us"
] | 2024-02-16T17:54:08+00:00 | {"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "image", "dtype": "image"}, {"name": "mask_0", "dtype": "image"}, {"name": "mask_1", "dtype": "image"}, {"name": "mask_2", "dtype": "image"}, {"name": "mask_3", "dtype": "image"}, {"name": "mask_4", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 6082099.0, "num_examples": 11}, {"name": "test", "num_bytes": 1944663.0, "num_examples": 4}], "download_size": 8125511, "dataset_size": 8026762.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T17:54:12+00:00 |
|
1d856855b9d01ece801946c6b3824c6f09aff1fd | # Dataset Card for "distilabel-neurology-preferences"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | macadeliccc/distilabel-neurology-preferences | [
"region:us"
] | 2024-02-16T17:55:14+00:00 | {"language": ["en"], "dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "generation_model", "sequence": "string"}, {"name": "generation_prompt", "sequence": "string"}, {"name": "raw_generation_responses", "sequence": "string"}, {"name": "generations", "sequence": "string"}, {"name": "labelling_model", "dtype": "string"}, {"name": "labelling_prompt", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "raw_labelling_response", "dtype": "string"}, {"name": "rating", "sequence": "float64"}, {"name": "rationale", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 3076288, "num_examples": 500}], "download_size": 1363349, "dataset_size": 3076288}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T17:55:16+00:00 |
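Per the `dataset_info` above, each row of this preference set carries parallel `generations`, `rating`, and `rationale` sequences produced by the labelling model. A minimal inspection sketch, assuming the repository is publicly readable:

```python
from datasets import load_dataset

ds = load_dataset("macadeliccc/distilabel-neurology-preferences", split="train")

row = ds[0]
# generations, rating, and rationale are aligned sequences: one candidate
# response per entry, each scored and justified by the labelling model.
for text, score, why in zip(row["generations"], row["rating"], row["rationale"]):
    print(f"{score:.1f} | {text[:60]}... | {why[:60]}...")
```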
032d54cccae26a6e2fe2d5edac8bbeb85ec18562 | AlisaMenekse/ErrorCategoriesBCP_25k_rows | [
"region:us"
] | 2024-02-16T17:59:25+00:00 | {} | 2024-02-16T17:59:50+00:00 |
|
0d92ca40113d3501a21c21fa576ae4c0a9741043 | AsphyXIA/Baarat-Kan-Summarization | [
"region:us"
] | 2024-02-16T18:00:44+00:00 | {"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "target", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 97489736, "num_examples": 140890}], "download_size": 40315874, "dataset_size": 97489736}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:01:01+00:00 |
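This summarization set exposes plain `input`/`target` string columns per its metadata. A minimal loading sketch, assuming public access; the source language is inferred from the repo name ("Kan" suggesting Kannada) rather than stated in the record:

```python
from datasets import load_dataset

ds = load_dataset("AsphyXIA/Baarat-Kan-Summarization", split="train")

example = ds[0]
print("Input :", example["input"][:200])   # source text (Kannada, per repo name)
print("Target:", example["target"][:200])  # reference summary
```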
|
aaeb901d844fc3e3ebdd2a0d936b86c861622016 |
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1).
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | AlexAmin/test | [
"region:us"
] | 2024-02-16T18:02:59+00:00 | {} | 2024-02-16T18:04:18+00:00 |
803bfdda404f3c7215a33a5078f3f6b138ef0e4f | maghwa/OpenHermes-2-AR-10K-25-670k-680k | [
"region:us"
] | 2024-02-16T18:03:24+00:00 | {"dataset_info": {"features": [{"name": "skip_prompt_formatting", "dtype": "null"}, {"name": "model_name", "dtype": "null"}, {"name": "model", "dtype": "null"}, {"name": "conversations", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "id", "dtype": "null"}, {"name": "avatarUrl", "dtype": "null"}, {"name": "idx", "dtype": "null"}, {"name": "language", "dtype": "null"}, {"name": "hash", "dtype": "null"}, {"name": "views", "dtype": "float64"}, {"name": "topic", "dtype": "null"}, {"name": "title", "dtype": "null"}, {"name": "category", "dtype": "null"}, {"name": "custom_instruction", "dtype": "null"}, {"name": "system_prompt", "dtype": "null"}], "splits": [{"name": "train", "num_bytes": 24962526, "num_examples": 10001}], "download_size": 11272617, "dataset_size": 24962526}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:03:27+00:00 |
|
d38ea21f7b9db949ce4228f7b1702cd94a7776ba | tanisthahota/Baarat-Kan-Summarization | [
"region:us"
] | 2024-02-16T18:04:46+00:00 | {"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "target", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 97489736, "num_examples": 140890}], "download_size": 40315874, "dataset_size": 97489736}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:04:55+00:00 |
|
5feea66ec6a389444c0ea5ff55c3f3d170aa660b | gzguevara/test3 | [
"region:us"
] | 2024-02-16T18:09:39+00:00 | {"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "image", "dtype": "image"}, {"name": "mask_0", "dtype": "image"}, {"name": "mask_1", "dtype": "image"}, {"name": "mask_2", "dtype": "image"}, {"name": "mask_3", "dtype": "image"}, {"name": "mask_4", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 3817521.0, "num_examples": 7}, {"name": "test", "num_bytes": 1781388.0, "num_examples": 4}], "download_size": 5733535, "dataset_size": 5598909.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T18:09:42+00:00 |
|
5364f3f1bbd3a883121cbeb2d009c5601f0f59e1 | GGital/Signal_Test01 | [
"region:us"
] | 2024-02-16T18:10:13+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 11566389.0, "num_examples": 647}], "download_size": 11525815, "dataset_size": 11566389.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:10:18+00:00 |
|
dac9b498c99e51ce266b98c28bd05ac36ef31026 | GGital/Signal_Test02 | [
"region:us"
] | 2024-02-16T18:14:27+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 11566389.0, "num_examples": 647}], "download_size": 11525815, "dataset_size": 11566389.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:14:31+00:00 |
|
8ee1f58c6adcefde39176d5c3233796286b1a8ce | OALL/Arabic_MMLU | [
"region:us"
] | 2024-02-16T18:17:40+00:00 | {"dataset_info": [{"config_name": "abstract_algebra", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 29769, "num_examples": 100}, {"name": "dev", "num_bytes": 1269, "num_examples": 5}], "download_size": 19750, "dataset_size": 31038}, {"config_name": "anatomy", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 48669, "num_examples": 135}, {"name": "dev", "num_bytes": 1534, "num_examples": 5}], "download_size": 35258, "dataset_size": 50203}, {"config_name": "astronomy", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 69704, "num_examples": 152}, {"name": "dev", "num_bytes": 2981, "num_examples": 5}], "download_size": 49878, "dataset_size": 72685}, {"config_name": "business_ethics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 51514, "num_examples": 100}, {"name": "dev", "num_bytes": 3288, "num_examples": 5}], "download_size": 37704, "dataset_size": 54802}, {"config_name": "clinical_knowledge", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 102346, "num_examples": 265}, {"name": "dev", "num_bytes": 1810, "num_examples": 5}], "download_size": 63082, "dataset_size": 104156}, {"config_name": "college_biology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 75007, "num_examples": 144}, {"name": "dev", "num_bytes": 2379, "num_examples": 5}], "download_size": 50193, "dataset_size": 77386}, {"config_name": "college_chemistry", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 37276, "num_examples": 100}, {"name": "dev", "num_bytes": 2083, "num_examples": 5}], "download_size": 31944, "dataset_size": 39359}, {"config_name": "college_computer_science", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": 
"string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 56979, "num_examples": 100}, {"name": "dev", "num_bytes": 3415, "num_examples": 5}], "download_size": 41297, "dataset_size": 60394}, {"config_name": "college_mathematics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 36648, "num_examples": 100}, {"name": "dev", "num_bytes": 1891, "num_examples": 5}], "download_size": 29831, "dataset_size": 38539}, {"config_name": "college_medicine", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 80163, "num_examples": 173}, {"name": "dev", "num_bytes": 2650, "num_examples": 5}], "download_size": 53862, "dataset_size": 82813}, {"config_name": "college_physics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 42431, "num_examples": 102}, {"name": "dev", "num_bytes": 1828, "num_examples": 5}], "download_size": 30292, "dataset_size": 44259}, {"config_name": "computer_security", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 39166, "num_examples": 100}, {"name": "dev", "num_bytes": 1750, "num_examples": 5}], "download_size": 31153, "dataset_size": 40916}, {"config_name": "conceptual_physics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 69000, "num_examples": 235}, {"name": "dev", "num_bytes": 1537, "num_examples": 5}], "download_size": 40421, "dataset_size": 70537}, {"config_name": "econometrics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 63979, "num_examples": 114}, {"name": "dev", "num_bytes": 2364, "num_examples": 5}], "download_size": 44448, "dataset_size": 66343}, {"config_name": "electrical_engineering", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", 
"num_bytes": 42482, "num_examples": 145}, {"name": "dev", "num_bytes": 1680, "num_examples": 5}], "download_size": 31774, "dataset_size": 44162}, {"config_name": "elementary_mathematics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 108603, "num_examples": 378}, {"name": "dev", "num_bytes": 2078, "num_examples": 5}], "download_size": 61970, "dataset_size": 110681}, {"config_name": "formal_logic", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 69054, "num_examples": 126}, {"name": "dev", "num_bytes": 2558, "num_examples": 5}], "download_size": 43567, "dataset_size": 71612}, {"config_name": "global_facts", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 30511, "num_examples": 100}, {"name": "dev", "num_bytes": 1752, "num_examples": 5}], "download_size": 26776, "dataset_size": 32263}, {"config_name": "high_school_biology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 168964, "num_examples": 310}, {"name": "dev", "num_bytes": 2865, "num_examples": 5}], "download_size": 90706, "dataset_size": 171829}, {"config_name": "high_school_chemistry", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 89575, "num_examples": 203}, {"name": "dev", "num_bytes": 2145, "num_examples": 5}], "download_size": 52145, "dataset_size": 91720}, {"config_name": "high_school_computer_science", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 62039, "num_examples": 100}, {"name": "dev", "num_bytes": 4358, "num_examples": 5}], "download_size": 46934, "dataset_size": 66397}, {"config_name": "high_school_european_history", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 95458, "num_examples": 165}, {"name": "dev", "num_bytes": 2434, "num_examples": 5}], "download_size": 49160, "dataset_size": 97892}, 
{"config_name": "high_school_geography", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 72427, "num_examples": 198}, {"name": "dev", "num_bytes": 2184, "num_examples": 5}], "download_size": 44749, "dataset_size": 74611}, {"config_name": "high_school_government_and_politics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 107773, "num_examples": 193}, {"name": "dev", "num_bytes": 2774, "num_examples": 5}], "download_size": 63285, "dataset_size": 110547}, {"config_name": "high_school_macroeconomics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 196950, "num_examples": 390}, {"name": "dev", "num_bytes": 2481, "num_examples": 5}], "download_size": 91074, "dataset_size": 199431}, {"config_name": "high_school_mathematics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 83340, "num_examples": 270}, {"name": "dev", "num_bytes": 2072, "num_examples": 5}], "download_size": 46560, "dataset_size": 85412}, {"config_name": "high_school_microeconomics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 125185, "num_examples": 238}, {"name": "dev", "num_bytes": 1952, "num_examples": 5}], "download_size": 64821, "dataset_size": 127137}, {"config_name": "high_school_physics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 78332, "num_examples": 151}, {"name": "dev", "num_bytes": 2221, "num_examples": 5}], "download_size": 46384, "dataset_size": 80553}, {"config_name": "high_school_psychology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 246335, "num_examples": 545}, {"name": "dev", "num_bytes": 2501, "num_examples": 5}], "download_size": 122056, "dataset_size": 248836}, {"config_name": "high_school_statistics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": 
"string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 148636, "num_examples": 216}, {"name": "dev", "num_bytes": 3053, "num_examples": 5}], "download_size": 83364, "dataset_size": 151689}, {"config_name": "high_school_us_history", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 117928, "num_examples": 204}, {"name": "dev", "num_bytes": 2353, "num_examples": 5}], "download_size": 45590, "dataset_size": 120281}, {"config_name": "high_school_world_history", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 138288, "num_examples": 237}, {"name": "dev", "num_bytes": 2270, "num_examples": 5}], "download_size": 57174, "dataset_size": 140558}, {"config_name": "human_aging", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 74221, "num_examples": 223}, {"name": "dev", "num_bytes": 1620, "num_examples": 5}], "download_size": 48124, "dataset_size": 75841}, {"config_name": "human_sexuality", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 49433, "num_examples": 131}, {"name": "dev", "num_bytes": 1705, "num_examples": 5}], "download_size": 36031, "dataset_size": 51138}, {"config_name": "international_law", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 79679, "num_examples": 121}, {"name": "dev", "num_bytes": 3626, "num_examples": 5}], "download_size": 58645, "dataset_size": 83305}, {"config_name": "jurisprudence", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 46821, "num_examples": 108}, {"name": "dev", "num_bytes": 1705, "num_examples": 5}], "download_size": 38797, "dataset_size": 48526}, {"config_name": "logical_fallacies", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": 
"string"}], "splits": [{"name": "test", "num_bytes": 73002, "num_examples": 163}, {"name": "dev", "num_bytes": 2225, "num_examples": 5}], "download_size": 45485, "dataset_size": 75227}, {"config_name": "machine_learning", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 49230, "num_examples": 112}, {"name": "dev", "num_bytes": 3443, "num_examples": 5}], "download_size": 40348, "dataset_size": 52673}, {"config_name": "management", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 29353, "num_examples": 103}, {"name": "dev", "num_bytes": 1262, "num_examples": 5}], "download_size": 25701, "dataset_size": 30615}, {"config_name": "marketing", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 92377, "num_examples": 234}, {"name": "dev", "num_bytes": 2487, "num_examples": 5}], "download_size": 58101, "dataset_size": 94864}, {"config_name": "medical_genetics", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 33633, "num_examples": 100}, {"name": "dev", "num_bytes": 2032, "num_examples": 5}], "download_size": 30302, "dataset_size": 35665}, {"config_name": "miscellaneous", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 214072, "num_examples": 783}, {"name": "dev", "num_bytes": 1109, "num_examples": 5}], "download_size": 123867, "dataset_size": 215181}, {"config_name": "moral_disputes", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 163324, "num_examples": 346}, {"name": "dev", "num_bytes": 2599, "num_examples": 5}], "download_size": 92773, "dataset_size": 165923}, {"config_name": "moral_scenarios", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 632998, "num_examples": 895}, {"name": "dev", "num_bytes": 3372, "num_examples": 5}], "download_size": 167360, "dataset_size": 636370}, {"config_name": 
"nutrition", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 143862, "num_examples": 306}, {"name": "dev", "num_bytes": 3217, "num_examples": 5}], "download_size": 86988, "dataset_size": 147079}, {"config_name": "philosophy", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 112934, "num_examples": 311}, {"name": "dev", "num_bytes": 1375, "num_examples": 5}], "download_size": 67743, "dataset_size": 114309}, {"config_name": "prehistory", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 136174, "num_examples": 324}, {"name": "dev", "num_bytes": 2840, "num_examples": 5}], "download_size": 82678, "dataset_size": 139014}, {"config_name": "professional_accounting", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 178116, "num_examples": 282}, {"name": "dev", "num_bytes": 2765, "num_examples": 5}], "download_size": 98823, "dataset_size": 180881}, {"config_name": "professional_law", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 1771393, "num_examples": 1534}, {"name": "dev", "num_bytes": 6926, "num_examples": 5}], "download_size": 833880, "dataset_size": 1778319}, {"config_name": "professional_medicine", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 166458, "num_examples": 272}, {"name": "dev", "num_bytes": 2964, "num_examples": 5}], "download_size": 78692, "dataset_size": 169422}, {"config_name": "professional_psychology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 313950, "num_examples": 612}, {"name": "dev", "num_bytes": 3183, "num_examples": 5}], "download_size": 167005, "dataset_size": 317133}, {"config_name": "public_relations", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, 
{"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 42175, "num_examples": 110}, {"name": "dev", "num_bytes": 2266, "num_examples": 5}], "download_size": 34096, "dataset_size": 44441}, {"config_name": "security_studies", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 288908, "num_examples": 245}, {"name": "dev", "num_bytes": 7190, "num_examples": 5}], "download_size": 162137, "dataset_size": 296098}, {"config_name": "sociology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 97962, "num_examples": 201}, {"name": "dev", "num_bytes": 2490, "num_examples": 5}], "download_size": 62735, "dataset_size": 100452}, {"config_name": "us_foreign_policy", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 47525, "num_examples": 100}, {"name": "dev", "num_bytes": 2725, "num_examples": 5}], "download_size": 35472, "dataset_size": 50250}, {"config_name": "virology", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 63377, "num_examples": 166}, {"name": "dev", "num_bytes": 1777, "num_examples": 5}], "download_size": 42481, "dataset_size": 65154}, {"config_name": "world_religions", "features": [{"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "subject", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 40435, "num_examples": 171}, {"name": "dev", "num_bytes": 1000, "num_examples": 5}], "download_size": 28872, "dataset_size": 41435}], "configs": [{"config_name": "abstract_algebra", "data_files": [{"split": "test", "path": "abstract_algebra/test-*"}, {"split": "dev", "path": "abstract_algebra/dev-*"}]}, {"config_name": "anatomy", "data_files": [{"split": "test", "path": "anatomy/test-*"}, {"split": "dev", "path": "anatomy/dev-*"}]}, {"config_name": "astronomy", "data_files": [{"split": "test", "path": "astronomy/test-*"}, {"split": "dev", "path": "astronomy/dev-*"}]}, {"config_name": "business_ethics", "data_files": [{"split": "test", "path": "business_ethics/test-*"}, {"split": "dev", "path": "business_ethics/dev-*"}]}, {"config_name": "clinical_knowledge", "data_files": [{"split": "test", "path": "clinical_knowledge/test-*"}, {"split": "dev", "path": "clinical_knowledge/dev-*"}]}, {"config_name": "college_biology", "data_files": [{"split": "test", "path": "college_biology/test-*"}, {"split": 
"dev", "path": "college_biology/dev-*"}]}, {"config_name": "college_chemistry", "data_files": [{"split": "test", "path": "college_chemistry/test-*"}, {"split": "dev", "path": "college_chemistry/dev-*"}]}, {"config_name": "college_computer_science", "data_files": [{"split": "test", "path": "college_computer_science/test-*"}, {"split": "dev", "path": "college_computer_science/dev-*"}]}, {"config_name": "college_mathematics", "data_files": [{"split": "test", "path": "college_mathematics/test-*"}, {"split": "dev", "path": "college_mathematics/dev-*"}]}, {"config_name": "college_medicine", "data_files": [{"split": "test", "path": "college_medicine/test-*"}, {"split": "dev", "path": "college_medicine/dev-*"}]}, {"config_name": "college_physics", "data_files": [{"split": "test", "path": "college_physics/test-*"}, {"split": "dev", "path": "college_physics/dev-*"}]}, {"config_name": "computer_security", "data_files": [{"split": "test", "path": "computer_security/test-*"}, {"split": "dev", "path": "computer_security/dev-*"}]}, {"config_name": "conceptual_physics", "data_files": [{"split": "test", "path": "conceptual_physics/test-*"}, {"split": "dev", "path": "conceptual_physics/dev-*"}]}, {"config_name": "econometrics", "data_files": [{"split": "test", "path": "econometrics/test-*"}, {"split": "dev", "path": "econometrics/dev-*"}]}, {"config_name": "electrical_engineering", "data_files": [{"split": "test", "path": "electrical_engineering/test-*"}, {"split": "dev", "path": "electrical_engineering/dev-*"}]}, {"config_name": "elementary_mathematics", "data_files": [{"split": "test", "path": "elementary_mathematics/test-*"}, {"split": "dev", "path": "elementary_mathematics/dev-*"}]}, {"config_name": "formal_logic", "data_files": [{"split": "test", "path": "formal_logic/test-*"}, {"split": "dev", "path": "formal_logic/dev-*"}]}, {"config_name": "global_facts", "data_files": [{"split": "test", "path": "global_facts/test-*"}, {"split": "dev", "path": "global_facts/dev-*"}]}, {"config_name": "high_school_biology", "data_files": [{"split": "test", "path": "high_school_biology/test-*"}, {"split": "dev", "path": "high_school_biology/dev-*"}]}, {"config_name": "high_school_chemistry", "data_files": [{"split": "test", "path": "high_school_chemistry/test-*"}, {"split": "dev", "path": "high_school_chemistry/dev-*"}]}, {"config_name": "high_school_computer_science", "data_files": [{"split": "test", "path": "high_school_computer_science/test-*"}, {"split": "dev", "path": "high_school_computer_science/dev-*"}]}, {"config_name": "high_school_european_history", "data_files": [{"split": "test", "path": "high_school_european_history/test-*"}, {"split": "dev", "path": "high_school_european_history/dev-*"}]}, {"config_name": "high_school_geography", "data_files": [{"split": "test", "path": "high_school_geography/test-*"}, {"split": "dev", "path": "high_school_geography/dev-*"}]}, {"config_name": "high_school_government_and_politics", "data_files": [{"split": "test", "path": "high_school_government_and_politics/test-*"}, {"split": "dev", "path": "high_school_government_and_politics/dev-*"}]}, {"config_name": "high_school_macroeconomics", "data_files": [{"split": "test", "path": "high_school_macroeconomics/test-*"}, {"split": "dev", "path": "high_school_macroeconomics/dev-*"}]}, {"config_name": "high_school_mathematics", "data_files": [{"split": "test", "path": "high_school_mathematics/test-*"}, {"split": "dev", "path": "high_school_mathematics/dev-*"}]}, {"config_name": "high_school_microeconomics", "data_files": [{"split": 
"test", "path": "high_school_microeconomics/test-*"}, {"split": "dev", "path": "high_school_microeconomics/dev-*"}]}, {"config_name": "high_school_physics", "data_files": [{"split": "test", "path": "high_school_physics/test-*"}, {"split": "dev", "path": "high_school_physics/dev-*"}]}, {"config_name": "high_school_psychology", "data_files": [{"split": "test", "path": "high_school_psychology/test-*"}, {"split": "dev", "path": "high_school_psychology/dev-*"}]}, {"config_name": "high_school_statistics", "data_files": [{"split": "test", "path": "high_school_statistics/test-*"}, {"split": "dev", "path": "high_school_statistics/dev-*"}]}, {"config_name": "high_school_us_history", "data_files": [{"split": "test", "path": "high_school_us_history/test-*"}, {"split": "dev", "path": "high_school_us_history/dev-*"}]}, {"config_name": "high_school_world_history", "data_files": [{"split": "test", "path": "high_school_world_history/test-*"}, {"split": "dev", "path": "high_school_world_history/dev-*"}]}, {"config_name": "human_aging", "data_files": [{"split": "test", "path": "human_aging/test-*"}, {"split": "dev", "path": "human_aging/dev-*"}]}, {"config_name": "human_sexuality", "data_files": [{"split": "test", "path": "human_sexuality/test-*"}, {"split": "dev", "path": "human_sexuality/dev-*"}]}, {"config_name": "international_law", "data_files": [{"split": "test", "path": "international_law/test-*"}, {"split": "dev", "path": "international_law/dev-*"}]}, {"config_name": "jurisprudence", "data_files": [{"split": "test", "path": "jurisprudence/test-*"}, {"split": "dev", "path": "jurisprudence/dev-*"}]}, {"config_name": "logical_fallacies", "data_files": [{"split": "test", "path": "logical_fallacies/test-*"}, {"split": "dev", "path": "logical_fallacies/dev-*"}]}, {"config_name": "machine_learning", "data_files": [{"split": "test", "path": "machine_learning/test-*"}, {"split": "dev", "path": "machine_learning/dev-*"}]}, {"config_name": "management", "data_files": [{"split": "test", "path": "management/test-*"}, {"split": "dev", "path": "management/dev-*"}]}, {"config_name": "marketing", "data_files": [{"split": "test", "path": "marketing/test-*"}, {"split": "dev", "path": "marketing/dev-*"}]}, {"config_name": "medical_genetics", "data_files": [{"split": "test", "path": "medical_genetics/test-*"}, {"split": "dev", "path": "medical_genetics/dev-*"}]}, {"config_name": "miscellaneous", "data_files": [{"split": "test", "path": "miscellaneous/test-*"}, {"split": "dev", "path": "miscellaneous/dev-*"}]}, {"config_name": "moral_disputes", "data_files": [{"split": "test", "path": "moral_disputes/test-*"}, {"split": "dev", "path": "moral_disputes/dev-*"}]}, {"config_name": "moral_scenarios", "data_files": [{"split": "test", "path": "moral_scenarios/test-*"}, {"split": "dev", "path": "moral_scenarios/dev-*"}]}, {"config_name": "nutrition", "data_files": [{"split": "test", "path": "nutrition/test-*"}, {"split": "dev", "path": "nutrition/dev-*"}]}, {"config_name": "philosophy", "data_files": [{"split": "test", "path": "philosophy/test-*"}, {"split": "dev", "path": "philosophy/dev-*"}]}, {"config_name": "prehistory", "data_files": [{"split": "test", "path": "prehistory/test-*"}, {"split": "dev", "path": "prehistory/dev-*"}]}, {"config_name": "professional_accounting", "data_files": [{"split": "test", "path": "professional_accounting/test-*"}, {"split": "dev", "path": "professional_accounting/dev-*"}]}, {"config_name": "professional_law", "data_files": [{"split": "test", "path": "professional_law/test-*"}, {"split": "dev", 
"path": "professional_law/dev-*"}]}, {"config_name": "professional_medicine", "data_files": [{"split": "test", "path": "professional_medicine/test-*"}, {"split": "dev", "path": "professional_medicine/dev-*"}]}, {"config_name": "professional_psychology", "data_files": [{"split": "test", "path": "professional_psychology/test-*"}, {"split": "dev", "path": "professional_psychology/dev-*"}]}, {"config_name": "public_relations", "data_files": [{"split": "test", "path": "public_relations/test-*"}, {"split": "dev", "path": "public_relations/dev-*"}]}, {"config_name": "security_studies", "data_files": [{"split": "test", "path": "security_studies/test-*"}, {"split": "dev", "path": "security_studies/dev-*"}]}, {"config_name": "sociology", "data_files": [{"split": "test", "path": "sociology/test-*"}, {"split": "dev", "path": "sociology/dev-*"}]}, {"config_name": "us_foreign_policy", "data_files": [{"split": "test", "path": "us_foreign_policy/test-*"}, {"split": "dev", "path": "us_foreign_policy/dev-*"}]}, {"config_name": "virology", "data_files": [{"split": "test", "path": "virology/test-*"}, {"split": "dev", "path": "virology/dev-*"}]}, {"config_name": "world_religions", "data_files": [{"split": "test", "path": "world_religions/test-*"}, {"split": "dev", "path": "world_religions/dev-*"}]}]} | 2024-02-16T18:23:17+00:00 |
|
361656f37a9affb12dd3ffa246e3018ee9a4de93 | gzguevara/test4 | [
"region:us"
] | 2024-02-16T18:18:08+00:00 | {"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "image", "dtype": "image"}, {"name": "mask_0", "dtype": "image"}, {"name": "mask_1", "dtype": "image"}, {"name": "mask_2", "dtype": "image"}, {"name": "mask_3", "dtype": "image"}, {"name": "mask_4", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 3037151.0, "num_examples": 7}, {"name": "test", "num_bytes": 1391954.0, "num_examples": 3}], "download_size": 4553943, "dataset_size": 4429105.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T18:18:11+00:00 |
|
46dc11b59c2522d2bf7a28ff0b2fb038e8cee4ed | tpremoli/CelebA-attrs-20k | [
"region:us"
] | 2024-02-16T18:18:16+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "5_o_Clock_Shadow", "dtype": "int64"}, {"name": "Arched_Eyebrows", "dtype": "int64"}, {"name": "Attractive", "dtype": "int64"}, {"name": "Bags_Under_Eyes", "dtype": "int64"}, {"name": "Bald", "dtype": "int64"}, {"name": "Bangs", "dtype": "int64"}, {"name": "Big_Lips", "dtype": "int64"}, {"name": "Big_Nose", "dtype": "int64"}, {"name": "Black_Hair", "dtype": "int64"}, {"name": "Blond_Hair", "dtype": "int64"}, {"name": "Blurry", "dtype": "int64"}, {"name": "Brown_Hair", "dtype": "int64"}, {"name": "Bushy_Eyebrows", "dtype": "int64"}, {"name": "Chubby", "dtype": "int64"}, {"name": "Double_Chin", "dtype": "int64"}, {"name": "Eyeglasses", "dtype": "int64"}, {"name": "Goatee", "dtype": "int64"}, {"name": "Gray_Hair", "dtype": "int64"}, {"name": "Heavy_Makeup", "dtype": "int64"}, {"name": "High_Cheekbones", "dtype": "int64"}, {"name": "Male", "dtype": "int64"}, {"name": "Mouth_Slightly_Open", "dtype": "int64"}, {"name": "Mustache", "dtype": "int64"}, {"name": "Narrow_Eyes", "dtype": "int64"}, {"name": "No_Beard", "dtype": "int64"}, {"name": "Oval_Face", "dtype": "int64"}, {"name": "Pale_Skin", "dtype": "int64"}, {"name": "Pointy_Nose", "dtype": "int64"}, {"name": "Receding_Hairline", "dtype": "int64"}, {"name": "Rosy_Cheeks", "dtype": "int64"}, {"name": "Sideburns", "dtype": "int64"}, {"name": "Smiling", "dtype": "int64"}, {"name": "Straight_Hair", "dtype": "int64"}, {"name": "Wavy_Hair", "dtype": "int64"}, {"name": "Wearing_Earrings", "dtype": "int64"}, {"name": "Wearing_Hat", "dtype": "int64"}, {"name": "Wearing_Lipstick", "dtype": "int64"}, {"name": "Wearing_Necklace", "dtype": "int64"}, {"name": "Wearing_Necktie", "dtype": "int64"}, {"name": "Young", "dtype": "int64"}, {"name": "prompt_string", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 148503924.518, "num_examples": 19999}, {"name": "validation", "num_bytes": 14740084.381, "num_examples": 1969}, {"name": "test", "num_bytes": 14676253.48, "num_examples": 1960}], "download_size": 168503967, "dataset_size": 177920262.379}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T20:11:20+00:00 |
|
b2a71d0b027d4af3ac3b05aa9a4fd43acc441b08 |
# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2](https://huggingface.co/Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2",
"harness_winogrande_5",
split="train")
```
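If you are unsure which of the 63 configurations to pass, a minimal sketch like the following can list them first. This uses the standard `get_dataset_config_names` helper from the `datasets` library; the slice printed at the end is just for illustration:
```python
from datasets import get_dataset_config_names

repo = "open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2"

# List every per-task configuration exposed by this details repository,
# so you can pick one (e.g. "harness_gsm8k_5") before calling load_dataset.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations available")
print(configs[:5])
```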
## Latest results
These are the [latest results from run 2024-02-16T18:21:24.569209](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2/blob/main/results_2024-02-16T18-21-24.569209.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.2521207309170715,
"acc_stderr": 0.030556259826906736,
"acc_norm": 0.2529609814071766,
"acc_norm_stderr": 0.03131972311648323,
"mc1": 0.2558139534883721,
"mc1_stderr": 0.015274176219283352,
"mc2": 0.42762316543412854,
"mc2_stderr": 0.015330016474026912
},
"harness|arc:challenge|25": {
"acc": 0.22781569965870307,
"acc_stderr": 0.012256708602326912,
"acc_norm": 0.24658703071672355,
"acc_norm_stderr": 0.012595726268790134
},
"harness|hellaswag|10": {
"acc": 0.304919338777136,
"acc_stderr": 0.004594323838650341,
"acc_norm": 0.34495120493925513,
"acc_norm_stderr": 0.004743808792037872
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.19,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.19,
"acc_norm_stderr": 0.039427724440366234
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.039725528847851375,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.039725528847851375
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.03110318238312337,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.03110318238312337
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.21132075471698114,
"acc_stderr": 0.025125766484827842,
"acc_norm": 0.21132075471698114,
"acc_norm_stderr": 0.025125766484827842
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080342,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080342
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.17,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.17,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.23829787234042554,
"acc_stderr": 0.027851252973889778,
"acc_norm": 0.23829787234042554,
"acc_norm_stderr": 0.027851252973889778
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.040969851398436695,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436695
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.22758620689655173,
"acc_stderr": 0.03493950380131184,
"acc_norm": 0.22758620689655173,
"acc_norm_stderr": 0.03493950380131184
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.23015873015873015,
"acc_stderr": 0.02167921966369314,
"acc_norm": 0.23015873015873015,
"acc_norm_stderr": 0.02167921966369314
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.16666666666666666,
"acc_stderr": 0.03333333333333337,
"acc_norm": 0.16666666666666666,
"acc_norm_stderr": 0.03333333333333337
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.18,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.18,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.3161290322580645,
"acc_stderr": 0.02645087448904277,
"acc_norm": 0.3161290322580645,
"acc_norm_stderr": 0.02645087448904277
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.29064039408866993,
"acc_stderr": 0.0319474007226554,
"acc_norm": 0.29064039408866993,
"acc_norm_stderr": 0.0319474007226554
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165065
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.034277431758165236,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.034277431758165236
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.2676767676767677,
"acc_stderr": 0.03154449888270285,
"acc_norm": 0.2676767676767677,
"acc_norm_stderr": 0.03154449888270285
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.27979274611398963,
"acc_stderr": 0.03239637046735703,
"acc_norm": 0.27979274611398963,
"acc_norm_stderr": 0.03239637046735703
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.2692307692307692,
"acc_stderr": 0.022489389793654845,
"acc_norm": 0.2692307692307692,
"acc_norm_stderr": 0.022489389793654845
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.02606715922227579,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.02606715922227579
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.33613445378151263,
"acc_stderr": 0.03068473711513537,
"acc_norm": 0.33613445378151263,
"acc_norm_stderr": 0.03068473711513537
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.23178807947019867,
"acc_stderr": 0.03445406271987054,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.03445406271987054
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.24770642201834864,
"acc_stderr": 0.01850814360254782,
"acc_norm": 0.24770642201834864,
"acc_norm_stderr": 0.01850814360254782
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4675925925925926,
"acc_stderr": 0.03402801581358966,
"acc_norm": 0.4675925925925926,
"acc_norm_stderr": 0.03402801581358966
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.27941176470588236,
"acc_stderr": 0.031493281045079556,
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.031493281045079556
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.23628691983122363,
"acc_stderr": 0.027652153144159263,
"acc_norm": 0.23628691983122363,
"acc_norm_stderr": 0.027652153144159263
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.21524663677130046,
"acc_stderr": 0.027584066602208263,
"acc_norm": 0.21524663677130046,
"acc_norm_stderr": 0.027584066602208263
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.2748091603053435,
"acc_stderr": 0.03915345408847836,
"acc_norm": 0.2748091603053435,
"acc_norm_stderr": 0.03915345408847836
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2892561983471074,
"acc_stderr": 0.041391127276354626,
"acc_norm": 0.2892561983471074,
"acc_norm_stderr": 0.041391127276354626
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.21296296296296297,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.2392638036809816,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.2392638036809816,
"acc_norm_stderr": 0.033519538795212696
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.23214285714285715,
"acc_stderr": 0.04007341809755807,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755807
},
"harness|hendrycksTest-management|5": {
"acc": 0.1941747572815534,
"acc_stderr": 0.039166677628225836,
"acc_norm": 0.1941747572815534,
"acc_norm_stderr": 0.039166677628225836
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.20085470085470086,
"acc_stderr": 0.02624677294689048,
"acc_norm": 0.20085470085470086,
"acc_norm_stderr": 0.02624677294689048
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.26309067688378035,
"acc_stderr": 0.01574549716904906,
"acc_norm": 0.26309067688378035,
"acc_norm_stderr": 0.01574549716904906
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.2254335260115607,
"acc_stderr": 0.022497230190967547,
"acc_norm": 0.2254335260115607,
"acc_norm_stderr": 0.022497230190967547
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24692737430167597,
"acc_stderr": 0.014422292204808871,
"acc_norm": 0.24692737430167597,
"acc_norm_stderr": 0.014422292204808871
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.024954184324879912,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.024954184324879912
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.29260450160771706,
"acc_stderr": 0.025839898334877983,
"acc_norm": 0.29260450160771706,
"acc_norm_stderr": 0.025839898334877983
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.02492200116888633,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.02492200116888633
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2198581560283688,
"acc_stderr": 0.024706141070705474,
"acc_norm": 0.2198581560283688,
"acc_norm_stderr": 0.024706141070705474
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2196870925684485,
"acc_stderr": 0.010574639934167518,
"acc_norm": 0.2196870925684485,
"acc_norm_stderr": 0.010574639934167518
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.39338235294117646,
"acc_stderr": 0.02967428828131118,
"acc_norm": 0.39338235294117646,
"acc_norm_stderr": 0.02967428828131118
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.016906615927288145,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.016906615927288145
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.2727272727272727,
"acc_stderr": 0.04265792110940589,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.04265792110940589
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.3551020408163265,
"acc_stderr": 0.030635655150387638,
"acc_norm": 0.3551020408163265,
"acc_norm_stderr": 0.030635655150387638
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.22885572139303484,
"acc_stderr": 0.029705284056772436,
"acc_norm": 0.22885572139303484,
"acc_norm_stderr": 0.029705284056772436
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|hendrycksTest-virology|5": {
"acc": 0.21686746987951808,
"acc_stderr": 0.03208284450356365,
"acc_norm": 0.21686746987951808,
"acc_norm_stderr": 0.03208284450356365
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.28654970760233917,
"acc_stderr": 0.03467826685703826,
"acc_norm": 0.28654970760233917,
"acc_norm_stderr": 0.03467826685703826
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2558139534883721,
"mc1_stderr": 0.015274176219283352,
"mc2": 0.42762316543412854,
"mc2_stderr": 0.015330016474026912
},
"harness|winogrande|5": {
"acc": 0.505130228887135,
"acc_stderr": 0.014051745961790516
},
"harness|gsm8k|5": {
"acc": 0.008339651250947688,
"acc_stderr": 0.002504942226860505
}
}
```
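If you only need these aggregate numbers rather than the per-sample details, a minimal sketch along these lines can fetch the raw results file directly. It uses the standard `hf_hub_download` helper from `huggingface_hub`, with the filename taken from the "Latest results" link above; the exact top-level layout of the JSON is an assumption based on the excerpt, so the lookup below tries both common shapes:
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results file referenced in "Latest results".
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2",
    filename="results_2024-02-16T18-21-24.569209.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# The excerpt above suggests a top-level "all" block with the aggregate
# metrics; fall back to a nested "results" key in case the file wraps it.
aggregate = results.get("all") or results.get("results", {}).get("all")
print(aggregate)
```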
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2 | [
"region:us"
] | 2024-02-16T18:23:16+00:00 | {"pretty_name": "Evaluation run of Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2", "dataset_summary": "Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2](https://huggingface.co/Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T18:21:24.569209](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-748M-Reason-With-Cinder-Test-2/blob/main/results_2024-02-16T18-21-24.569209.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2521207309170715,\n \"acc_stderr\": 0.030556259826906736,\n \"acc_norm\": 0.2529609814071766,\n \"acc_norm_stderr\": 0.03131972311648323,\n \"mc1\": 0.2558139534883721,\n \"mc1_stderr\": 0.015274176219283352,\n \"mc2\": 0.42762316543412854,\n \"mc2_stderr\": 0.015330016474026912\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22781569965870307,\n \"acc_stderr\": 0.012256708602326912,\n \"acc_norm\": 0.24658703071672355,\n \"acc_norm_stderr\": 0.012595726268790134\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.304919338777136,\n \"acc_stderr\": 0.004594323838650341,\n \"acc_norm\": 0.34495120493925513,\n \"acc_norm_stderr\": 0.004743808792037872\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.039725528847851375,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.039725528847851375\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.03110318238312337,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.03110318238312337\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21132075471698114,\n \"acc_stderr\": 0.025125766484827842,\n \"acc_norm\": 0.21132075471698114,\n \"acc_norm_stderr\": 0.025125766484827842\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.03586879280080342,\n \"acc_norm\": 
0.24305555555555555,\n \"acc_norm_stderr\": 0.03586879280080342\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2543352601156069,\n \"acc_stderr\": 0.0332055644308557,\n \"acc_norm\": 0.2543352601156069,\n \"acc_norm_stderr\": 0.0332055644308557\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.23829787234042554,\n \"acc_stderr\": 0.027851252973889778,\n \"acc_norm\": 0.23829787234042554,\n \"acc_norm_stderr\": 0.027851252973889778\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436695,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436695\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.23015873015873015,\n \"acc_stderr\": 0.02167921966369314,\n \"acc_norm\": 0.23015873015873015,\n \"acc_norm_stderr\": 0.02167921966369314\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03333333333333337,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03333333333333337\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n \"acc_stderr\": 0.02645087448904277,\n \"acc_norm\": 0.3161290322580645,\n \"acc_norm_stderr\": 0.02645087448904277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.0319474007226554,\n \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.0319474007226554\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2676767676767677,\n \"acc_stderr\": 0.03154449888270285,\n \"acc_norm\": 0.2676767676767677,\n \"acc_norm_stderr\": 0.03154449888270285\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.27979274611398963,\n 
\"acc_stderr\": 0.03239637046735703,\n \"acc_norm\": 0.27979274611398963,\n \"acc_norm_stderr\": 0.03239637046735703\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2692307692307692,\n \"acc_stderr\": 0.022489389793654845,\n \"acc_norm\": 0.2692307692307692,\n \"acc_norm_stderr\": 0.022489389793654845\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.02606715922227579,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.02606715922227579\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.33613445378151263,\n \"acc_stderr\": 0.03068473711513537,\n \"acc_norm\": 0.33613445378151263,\n \"acc_norm_stderr\": 0.03068473711513537\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23178807947019867,\n \"acc_stderr\": 0.03445406271987054,\n \"acc_norm\": 0.23178807947019867,\n \"acc_norm_stderr\": 0.03445406271987054\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24770642201834864,\n \"acc_stderr\": 0.01850814360254782,\n \"acc_norm\": 0.24770642201834864,\n \"acc_norm_stderr\": 0.01850814360254782\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4675925925925926,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.4675925925925926,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.031493281045079556,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.031493281045079556\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.23628691983122363,\n \"acc_stderr\": 0.027652153144159263,\n \"acc_norm\": 0.23628691983122363,\n \"acc_norm_stderr\": 0.027652153144159263\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.21524663677130046,\n \"acc_stderr\": 0.027584066602208263,\n \"acc_norm\": 0.21524663677130046,\n \"acc_norm_stderr\": 0.027584066602208263\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2748091603053435,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.2748091603053435,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2892561983471074,\n \"acc_stderr\": 0.041391127276354626,\n \"acc_norm\": 0.2892561983471074,\n \"acc_norm_stderr\": 0.041391127276354626\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n \"acc_stderr\": 0.04007341809755807,\n \"acc_norm\": 0.23214285714285715,\n \"acc_norm_stderr\": 0.04007341809755807\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.039166677628225836,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.039166677628225836\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.20085470085470086,\n \"acc_stderr\": 0.02624677294689048,\n \"acc_norm\": 0.20085470085470086,\n \"acc_norm_stderr\": 0.02624677294689048\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 
0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26309067688378035,\n \"acc_stderr\": 0.01574549716904906,\n \"acc_norm\": 0.26309067688378035,\n \"acc_norm_stderr\": 0.01574549716904906\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.022497230190967547,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.022497230190967547\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808871,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808871\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.024954184324879912,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.024954184324879912\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.29260450160771706,\n \"acc_stderr\": 0.025839898334877983,\n \"acc_norm\": 0.29260450160771706,\n \"acc_norm_stderr\": 0.025839898334877983\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02492200116888633,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02492200116888633\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2198581560283688,\n \"acc_stderr\": 0.024706141070705474,\n \"acc_norm\": 0.2198581560283688,\n \"acc_norm_stderr\": 0.024706141070705474\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2196870925684485,\n \"acc_stderr\": 0.010574639934167518,\n \"acc_norm\": 0.2196870925684485,\n \"acc_norm_stderr\": 0.010574639934167518\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.39338235294117646,\n \"acc_stderr\": 0.02967428828131118,\n \"acc_norm\": 0.39338235294117646,\n \"acc_norm_stderr\": 0.02967428828131118\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.016906615927288145,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.016906615927288145\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3551020408163265,\n \"acc_stderr\": 0.030635655150387638,\n \"acc_norm\": 0.3551020408163265,\n \"acc_norm_stderr\": 0.030635655150387638\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.22885572139303484,\n \"acc_stderr\": 0.029705284056772436,\n \"acc_norm\": 0.22885572139303484,\n \"acc_norm_stderr\": 0.029705284056772436\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.21686746987951808,\n \"acc_stderr\": 0.03208284450356365,\n \"acc_norm\": 0.21686746987951808,\n \"acc_norm_stderr\": 0.03208284450356365\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.28654970760233917,\n \"acc_stderr\": 0.03467826685703826,\n \"acc_norm\": 0.28654970760233917,\n \"acc_norm_stderr\": 0.03467826685703826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2558139534883721,\n \"mc1_stderr\": 0.015274176219283352,\n \"mc2\": 0.42762316543412854,\n \"mc2_stderr\": 0.015330016474026912\n },\n \"harness|winogrande|5\": {\n 
\"acc\": 0.505130228887135,\n \"acc_stderr\": 0.014051745961790516\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.008339651250947688,\n \"acc_stderr\": 0.002504942226860505\n }\n}\n```", "repo_url": "https://huggingface.co/Josephgflowers/TinyLlama-748M-Reason-With-Cinder-Test-2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-21-24.569209.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-21-24.569209.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-21-24.569209.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-21-24.569209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-21-24.569209.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|truthfulqa:mc|0_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["**/details_harness|winogrande|5_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T18-21-24.569209.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T18_21_24.569209", "path": ["results_2024-02-16T18-21-24.569209.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T18-21-24.569209.parquet"]}]}]} | 2024-02-16T18:23:39+00:00 |
8a22d74f1034242d8fcc0dfff1573d129dc7a89a | Blutkoete/docki-ap1 | [
"region:us"
] | 2024-02-16T18:23:37+00:00 | {"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}], "splits": [{"name": "train", "num_bytes": 123464.0, "num_examples": 2806}, {"name": "test", "num_bytes": 30888.0, "num_examples": 702}], "download_size": 80525, "dataset_size": 154352.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T18:23:40+00:00 |
|
bd013972020ce0b5900a269237aa327b6bf5c343 | HEMASENTHIL/SAMPLE_DATASET | [
"region:us"
] | 2024-02-16T18:26:05+00:00 | {"dataset_info": {"features": [{"name": "Roll no", "dtype": "int64"}, {"name": "english_sentence", "dtype": "string"}, {"name": "Thanglish Sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 512.0, "num_examples": 4}, {"name": "test", "num_bytes": 256.0, "num_examples": 2}], "download_size": 5596, "dataset_size": 768.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T18:41:53+00:00 |
|
0e1960315ccebae67c830aa8fd694f4596c277e8 | kings-crown/Aircrafts_updated | [
"license:mit",
"region:us"
] | 2024-02-16T18:28:10+00:00 | {"license": "mit"} | 2024-02-16T18:43:30+00:00 |
|
527e592ebc727c2a225b0a93658c2179b896b12f | alisson40889/louca | [
"license:openrail",
"region:us"
] | 2024-02-16T18:29:15+00:00 | {"license": "openrail"} | 2024-02-16T18:30:03+00:00 |
|
d29ae760112111ce4172936b0bf2be2ea3898ead | Onrglc/Onur | [
"size_categories:1K<n<10K",
"language:tr",
"language:en",
"license:apache-2.0",
"art",
"region:us"
] | 2024-02-16T18:32:47+00:00 | {"language": ["tr", "en"], "license": "apache-2.0", "size_categories": ["1K<n<10K"], "tags": ["art"]} | 2024-02-16T18:35:06+00:00 |
|
0bdcc50a89fb1697fba9a40d5ee628c8515d53b1 |
# Dataset Card for Evaluation run of shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp](https://huggingface.co/shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp",
"harness_winogrande_5",
split="train")
```
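Since the `"latest"` split of any configuration always points to the most recent run, and the aggregated scores live in the `"results"` configuration, the latest aggregated metrics can be pulled directly. This is a minimal sketch, assuming the `datasets` library is installed and the split layout described above:

```python
from datasets import load_dataset

# The "results" configuration aggregates every task score for a run;
# the "latest" split points to the most recent evaluation timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp",
    "results",
    split="latest",
)
print(results[0])  # a single row holding the aggregated metrics
```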
## Latest results
These are the [latest results from run 2024-02-16T18:45:36.411445](https://huggingface.co/datasets/open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp/blob/main/results_2024-02-16T18-45-36.411445.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6499656745636075,
"acc_stderr": 0.03190074035438905,
"acc_norm": 0.6509134828280464,
"acc_norm_stderr": 0.032545406816765036,
"mc1": 0.3402692778457772,
"mc1_stderr": 0.016586304901762564,
"mc2": 0.49557311062145515,
"mc2_stderr": 0.015305674753451043
},
"harness|arc:challenge|25": {
"acc": 0.6313993174061433,
"acc_stderr": 0.014097810678042198,
"acc_norm": 0.6604095563139932,
"acc_norm_stderr": 0.013839039762820169
},
"harness|hellaswag|10": {
"acc": 0.6680940051782513,
"acc_stderr": 0.0046993506536956225,
"acc_norm": 0.851822346146186,
"acc_norm_stderr": 0.0035454991695580535
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5925925925925926,
"acc_stderr": 0.04244633238353227,
"acc_norm": 0.5925925925925926,
"acc_norm_stderr": 0.04244633238353227
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7171052631578947,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.7171052631578947,
"acc_norm_stderr": 0.03665349695640767
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7018867924528301,
"acc_stderr": 0.028152837942493875,
"acc_norm": 0.7018867924528301,
"acc_norm_stderr": 0.028152837942493875
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542126
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6820809248554913,
"acc_stderr": 0.0355068398916558,
"acc_norm": 0.6820809248554913,
"acc_norm_stderr": 0.0355068398916558
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.04913595201274498,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.04913595201274498
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5574468085106383,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.5574468085106383,
"acc_norm_stderr": 0.03246956919789958
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.046920083813689104,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.046920083813689104
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5586206896551724,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.5586206896551724,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42063492063492064,
"acc_stderr": 0.025424835086924,
"acc_norm": 0.42063492063492064,
"acc_norm_stderr": 0.025424835086924
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.49206349206349204,
"acc_stderr": 0.044715725362943486,
"acc_norm": 0.49206349206349204,
"acc_norm_stderr": 0.044715725362943486
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8032258064516129,
"acc_stderr": 0.02261640942074202,
"acc_norm": 0.8032258064516129,
"acc_norm_stderr": 0.02261640942074202
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.49261083743842365,
"acc_stderr": 0.03517603540361008,
"acc_norm": 0.49261083743842365,
"acc_norm_stderr": 0.03517603540361008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7636363636363637,
"acc_stderr": 0.03317505930009181,
"acc_norm": 0.7636363636363637,
"acc_norm_stderr": 0.03317505930009181
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.029376616484945633,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.029376616484945633
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9067357512953368,
"acc_stderr": 0.020986854593289733,
"acc_norm": 0.9067357512953368,
"acc_norm_stderr": 0.020986854593289733
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6717948717948717,
"acc_stderr": 0.023807633198657266,
"acc_norm": 0.6717948717948717,
"acc_norm_stderr": 0.023807633198657266
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.31851851851851853,
"acc_stderr": 0.02840653309060846,
"acc_norm": 0.31851851851851853,
"acc_norm_stderr": 0.02840653309060846
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6974789915966386,
"acc_stderr": 0.029837962388291943,
"acc_norm": 0.6974789915966386,
"acc_norm_stderr": 0.029837962388291943
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.03822746937658752,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.03822746937658752
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5416666666666666,
"acc_stderr": 0.03398110890294636,
"acc_norm": 0.5416666666666666,
"acc_norm_stderr": 0.03398110890294636
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.025845017986926917,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.025845017986926917
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7932489451476793,
"acc_stderr": 0.0263616516683891,
"acc_norm": 0.7932489451476793,
"acc_norm_stderr": 0.0263616516683891
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7130044843049327,
"acc_stderr": 0.030360379710291954,
"acc_norm": 0.7130044843049327,
"acc_norm_stderr": 0.030360379710291954
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8015267175572519,
"acc_stderr": 0.034981493854624734,
"acc_norm": 0.8015267175572519,
"acc_norm_stderr": 0.034981493854624734
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098822,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098822
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7914110429447853,
"acc_stderr": 0.031921934489347235,
"acc_norm": 0.7914110429447853,
"acc_norm_stderr": 0.031921934489347235
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.047268355537191,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.047268355537191
},
"harness|hendrycksTest-management|5": {
"acc": 0.8252427184466019,
"acc_stderr": 0.03760178006026621,
"acc_norm": 0.8252427184466019,
"acc_norm_stderr": 0.03760178006026621
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8547008547008547,
"acc_stderr": 0.023086635086841407,
"acc_norm": 0.8547008547008547,
"acc_norm_stderr": 0.023086635086841407
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8250319284802043,
"acc_stderr": 0.013586619219903338,
"acc_norm": 0.8250319284802043,
"acc_norm_stderr": 0.013586619219903338
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7369942196531792,
"acc_stderr": 0.023703099525258172,
"acc_norm": 0.7369942196531792,
"acc_norm_stderr": 0.023703099525258172
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4044692737430168,
"acc_stderr": 0.016414440917293147,
"acc_norm": 0.4044692737430168,
"acc_norm_stderr": 0.016414440917293147
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7320261437908496,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.7320261437908496,
"acc_norm_stderr": 0.025360603796242557
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.707395498392283,
"acc_stderr": 0.02583989833487798,
"acc_norm": 0.707395498392283,
"acc_norm_stderr": 0.02583989833487798
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7376543209876543,
"acc_stderr": 0.02447722285613511,
"acc_norm": 0.7376543209876543,
"acc_norm_stderr": 0.02447722285613511
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.46808510638297873,
"acc_stderr": 0.029766675075873866,
"acc_norm": 0.46808510638297873,
"acc_norm_stderr": 0.029766675075873866
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.47327249022164275,
"acc_stderr": 0.012751977967676008,
"acc_norm": 0.47327249022164275,
"acc_norm_stderr": 0.012751977967676008
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6985294117647058,
"acc_stderr": 0.027875982114273168,
"acc_norm": 0.6985294117647058,
"acc_norm_stderr": 0.027875982114273168
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6683006535947712,
"acc_stderr": 0.019047485239360378,
"acc_norm": 0.6683006535947712,
"acc_norm_stderr": 0.019047485239360378
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7551020408163265,
"acc_stderr": 0.02752963744017493,
"acc_norm": 0.7551020408163265,
"acc_norm_stderr": 0.02752963744017493
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.025538433368578334,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.025538433368578334
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.89,
"acc_stderr": 0.03144660377352203,
"acc_norm": 0.89,
"acc_norm_stderr": 0.03144660377352203
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.0387862677100236,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.0387862677100236
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160882,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.027966785859160882
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3402692778457772,
"mc1_stderr": 0.016586304901762564,
"mc2": 0.49557311062145515,
"mc2_stderr": 0.015305674753451043
},
"harness|winogrande|5": {
"acc": 0.7971586424625099,
"acc_stderr": 0.011301439925936648
},
"harness|gsm8k|5": {
"acc": 0.6595905989385898,
"acc_stderr": 0.013052097103299099
}
}
```
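To work with these numbers programmatically rather than reading the JSON by hand, a short sketch follows. It assumes the `huggingface_hub` library is available and that the downloaded file nests the per-task scores under a top-level `"results"` key; adjust the key if the layout differs:

```python
import json

from huggingface_hub import hf_hub_download

# Fetch the raw results file linked above (repo_type must be "dataset").
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp",
    filename="results_2024-02-16T18-45-36.411445.json",
    repo_type="dataset",
)

with open(path) as f:
    data = json.load(f)

# Assumption: per-task scores sit under a "results" key; fall back to the
# top-level dict if the file matches the snippet shown above.
scores = data.get("results", data)

# Macro-average accuracy over the MMLU (hendrycksTest) subtasks.
mmlu = [v["acc"] for k, v in scores.items() if "hendrycksTest" in k]
print(f"MMLU macro-average over {len(mmlu)} subtasks: {sum(mmlu) / len(mmlu):.4f}")
```

The result can be sanity-checked against the aggregate numbers in the `"all"` block above.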
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp | [
"region:us"
] | 2024-02-16T18:36:34+00:00 | {"pretty_name": "Evaluation run of shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp](https://huggingface.co/shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T18:45:36.411445](https://huggingface.co/datasets/open-llm-leaderboard/details_shahzebnaveed__StarlingHermes-2.5-Mistral-7B-slerp/blob/main/results_2024-02-16T18-45-36.411445.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6499656745636075,\n \"acc_stderr\": 0.03190074035438905,\n \"acc_norm\": 0.6509134828280464,\n \"acc_norm_stderr\": 0.032545406816765036,\n \"mc1\": 0.3402692778457772,\n \"mc1_stderr\": 0.016586304901762564,\n \"mc2\": 0.49557311062145515,\n \"mc2_stderr\": 0.015305674753451043\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6313993174061433,\n \"acc_stderr\": 0.014097810678042198,\n \"acc_norm\": 0.6604095563139932,\n \"acc_norm_stderr\": 0.013839039762820169\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6680940051782513,\n \"acc_stderr\": 0.0046993506536956225,\n \"acc_norm\": 0.851822346146186,\n \"acc_norm_stderr\": 0.0035454991695580535\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353227,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353227\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.028152837942493875,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.028152837942493875\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 
0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542126,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542126\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8032258064516129,\n \"acc_stderr\": 0.02261640942074202,\n \"acc_norm\": 0.8032258064516129,\n \"acc_norm_stderr\": 0.02261640942074202\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.020986854593289733,\n \"acc_norm\": 
0.9067357512953368,\n \"acc_norm_stderr\": 0.020986854593289733\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291943,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291943\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7130044843049327,\n \"acc_stderr\": 0.030360379710291954,\n \"acc_norm\": 0.7130044843049327,\n \"acc_norm_stderr\": 0.030360379710291954\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624734,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624734\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098822,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098822\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 
0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.013586619219903338,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903338\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258172,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258172\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4044692737430168,\n \"acc_stderr\": 0.016414440917293147,\n \"acc_norm\": 0.4044692737430168,\n \"acc_norm_stderr\": 0.016414440917293147\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.02447722285613511,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.02447722285613511\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676008,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676008\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6985294117647058,\n \"acc_stderr\": 0.027875982114273168,\n \"acc_norm\": 0.6985294117647058,\n \"acc_norm_stderr\": 0.027875982114273168\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7551020408163265,\n \"acc_stderr\": 0.02752963744017493,\n \"acc_norm\": 0.7551020408163265,\n \"acc_norm_stderr\": 0.02752963744017493\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160882,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160882\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3402692778457772,\n \"mc1_stderr\": 0.016586304901762564,\n \"mc2\": 0.49557311062145515,\n \"mc2_stderr\": 0.015305674753451043\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7971586424625099,\n \"acc_stderr\": 0.011301439925936648\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.6595905989385898,\n \"acc_stderr\": 0.013052097103299099\n }\n}\n```", "repo_url": "https://huggingface.co/shahzebnaveed/StarlingHermes-2.5-Mistral-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-34-16.912824.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-34-16.912824.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-45-36.411445.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-45-36.411445.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-45-36.411445.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-45-36.411445.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-45-36.411445.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": 
"2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-34-16.912824.parquet"]}, 
{"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["**/details_harness|winogrande|5_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": ["**/details_harness|winogrande|5_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T18-45-36.411445.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T18_34_16.912824", "path": ["results_2024-02-16T18-34-16.912824.parquet"]}, {"split": "2024_02_16T18_45_36.411445", "path": 
["results_2024-02-16T18-45-36.411445.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T18-45-36.411445.parquet"]}]}]} | 2024-02-16T18:47:56+00:00 |
5661c3f6035e3ef65e1d7680807563b0917d0bf0 |
# Dataset Card for Evaluation run of shahzebnaveed/NeuralHermes-2.5-Mistral-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [shahzebnaveed/NeuralHermes-2.5-Mistral-7B](https://huggingface.co/shahzebnaveed/NeuralHermes-2.5-Mistral-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, with the split named after the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
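For a quick orientation, the available configurations can also be listed programmatically. A minimal sketch using the standard `datasets` utility (the repository name is taken from this card; the exact count may differ if further runs are added):

```python
from datasets import get_dataset_config_names

# Enumerate every per-task configuration in this evaluation repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B"
)
print(len(configs))   # the 63 task configs plus the aggregated "results" config
print(configs[:3])    # e.g. the harness_hendrycksTest_* entries listed above
```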
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset

# "latest" always points to the most recent evaluation run for this task.
data = load_dataset("open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B",
	"harness_winogrande_5",
	split="latest")
```
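Each configuration also exposes the timestamped splits listed in this card's metadata, so a specific run can be pinned explicitly rather than following "latest". A sketch using one of the two run timestamps recorded for this repository:

```python
from datasets import load_dataset

# Pin a specific evaluation run via its timestamped split name.
run = load_dataset(
    "open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B",
    "harness_winogrande_5",
    split="2024_02_16T18_45_36.411445",
)
print(run)  # per-example details for that run
```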
## Latest results
These are the [latest results from run 2024-02-16T18:38:39.552249](https://huggingface.co/datasets/open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B/blob/main/results_2024-02-16T18-38-39.552249.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the "results" configuration and in the "latest" split of each eval):
```python
{
"all": {
"acc": 0.6389813616730637,
"acc_stderr": 0.032241943281319616,
"acc_norm": 0.6417845405458356,
"acc_norm_stderr": 0.03288190466152383,
"mc1": 0.3598531211750306,
"mc1_stderr": 0.01680186046667716,
"mc2": 0.5229369986337806,
"mc2_stderr": 0.01525073227946668
},
"harness|arc:challenge|25": {
"acc": 0.6126279863481229,
"acc_stderr": 0.014235872487909869,
"acc_norm": 0.6484641638225256,
"acc_norm_stderr": 0.013952413699600931
},
"harness|hellaswag|10": {
"acc": 0.6519617606054571,
"acc_stderr": 0.004753746951620152,
"acc_norm": 0.8428599880501892,
"acc_norm_stderr": 0.003631889496122536
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6074074074074074,
"acc_stderr": 0.0421850621536888,
"acc_norm": 0.6074074074074074,
"acc_norm_stderr": 0.0421850621536888
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6867924528301886,
"acc_stderr": 0.02854479331905533,
"acc_norm": 0.6867924528301886,
"acc_norm_stderr": 0.02854479331905533
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.03586879280080341
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6242774566473989,
"acc_stderr": 0.036928207672648664,
"acc_norm": 0.6242774566473989,
"acc_norm_stderr": 0.036928207672648664
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107223,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107223
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5659574468085107,
"acc_stderr": 0.03240038086792747,
"acc_norm": 0.5659574468085107,
"acc_norm_stderr": 0.03240038086792747
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5241379310344828,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.0416180850350153
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.02546714904546955,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.02546714904546955
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7903225806451613,
"acc_stderr": 0.023157879349083525,
"acc_norm": 0.7903225806451613,
"acc_norm_stderr": 0.023157879349083525
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5172413793103449,
"acc_stderr": 0.035158955511656986,
"acc_norm": 0.5172413793103449,
"acc_norm_stderr": 0.035158955511656986
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.032250781083062896,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.032250781083062896
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.028057791672989017,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.028057791672989017
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8911917098445595,
"acc_stderr": 0.022473253332768776,
"acc_norm": 0.8911917098445595,
"acc_norm_stderr": 0.022473253332768776
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6153846153846154,
"acc_stderr": 0.024666744915187208,
"acc_norm": 0.6153846153846154,
"acc_norm_stderr": 0.024666744915187208
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.02803792996911499,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.02803792996911499
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.680672268907563,
"acc_stderr": 0.030283995525884396,
"acc_norm": 0.680672268907563,
"acc_norm_stderr": 0.030283995525884396
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8330275229357799,
"acc_stderr": 0.01599015488507338,
"acc_norm": 0.8330275229357799,
"acc_norm_stderr": 0.01599015488507338
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7990196078431373,
"acc_stderr": 0.02812597226565437,
"acc_norm": 0.7990196078431373,
"acc_norm_stderr": 0.02812597226565437
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.810126582278481,
"acc_stderr": 0.02553010046023349,
"acc_norm": 0.810126582278481,
"acc_norm_stderr": 0.02553010046023349
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7040358744394619,
"acc_stderr": 0.030636591348699803,
"acc_norm": 0.7040358744394619,
"acc_norm_stderr": 0.030636591348699803
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7862595419847328,
"acc_stderr": 0.0359546161177469,
"acc_norm": 0.7862595419847328,
"acc_norm_stderr": 0.0359546161177469
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070416
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.039578354719809805,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.039578354719809805
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7852760736196319,
"acc_stderr": 0.032262193772867744,
"acc_norm": 0.7852760736196319,
"acc_norm_stderr": 0.032262193772867744
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.022801382534597528,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.022801382534597528
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8301404853128991,
"acc_stderr": 0.013428186370608306,
"acc_norm": 0.8301404853128991,
"acc_norm_stderr": 0.013428186370608306
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7167630057803468,
"acc_stderr": 0.02425790170532338,
"acc_norm": 0.7167630057803468,
"acc_norm_stderr": 0.02425790170532338
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3094972067039106,
"acc_stderr": 0.015461169002371542,
"acc_norm": 0.3094972067039106,
"acc_norm_stderr": 0.015461169002371542
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7549019607843137,
"acc_stderr": 0.024630048979824782,
"acc_norm": 0.7549019607843137,
"acc_norm_stderr": 0.024630048979824782
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.684887459807074,
"acc_stderr": 0.026385273703464485,
"acc_norm": 0.684887459807074,
"acc_norm_stderr": 0.026385273703464485
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7530864197530864,
"acc_stderr": 0.02399350170904211,
"acc_norm": 0.7530864197530864,
"acc_norm_stderr": 0.02399350170904211
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5070921985815603,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.5070921985815603,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46936114732724904,
"acc_stderr": 0.012746237711716634,
"acc_norm": 0.46936114732724904,
"acc_norm_stderr": 0.012746237711716634
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.028245687391462937,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.028245687391462937
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.673202614379085,
"acc_stderr": 0.018975427920507215,
"acc_norm": 0.673202614379085,
"acc_norm_stderr": 0.018975427920507215
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6454545454545455,
"acc_stderr": 0.045820048415054174,
"acc_norm": 0.6454545454545455,
"acc_norm_stderr": 0.045820048415054174
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7387755102040816,
"acc_stderr": 0.028123429335142773,
"acc_norm": 0.7387755102040816,
"acc_norm_stderr": 0.028123429335142773
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8159203980099502,
"acc_stderr": 0.027403859410786845,
"acc_norm": 0.8159203980099502,
"acc_norm_stderr": 0.027403859410786845
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.87,
"acc_stderr": 0.033799766898963086,
"acc_norm": 0.87,
"acc_norm_stderr": 0.033799766898963086
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699122
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3598531211750306,
"mc1_stderr": 0.01680186046667716,
"mc2": 0.5229369986337806,
"mc2_stderr": 0.01525073227946668
},
"harness|winogrande|5": {
"acc": 0.7797947908445146,
"acc_stderr": 0.011646276755089688
},
"harness|gsm8k|5": {
"acc": 0.5595147839272175,
"acc_stderr": 0.01367457213169389
}
}
```
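To work with the aggregated numbers above without copy-pasting from this card, the raw results file linked in the previous section can be fetched directly. A sketch assuming the usual layout of these files, where the per-task metrics sit under a top-level "results" key:

```python
import json
from huggingface_hub import hf_hub_download

# Download the results JSON referenced above (note repo_type="dataset").
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B",
    filename="results_2024-02-16T18-38-39.552249.json",
    repo_type="dataset",
)
with open(path) as f:
    report = json.load(f)

# Assumed key layout; the "all" block matches the aggregate metrics shown above.
print(report["results"]["all"]["acc"], report["results"]["all"]["acc_norm"])
```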
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B | [
"region:us"
] | 2024-02-16T18:40:58+00:00 | {"pretty_name": "Evaluation run of shahzebnaveed/NeuralHermes-2.5-Mistral-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [shahzebnaveed/NeuralHermes-2.5-Mistral-7B](https://huggingface.co/shahzebnaveed/NeuralHermes-2.5-Mistral-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T18:38:39.552249](https://huggingface.co/datasets/open-llm-leaderboard/details_shahzebnaveed__NeuralHermes-2.5-Mistral-7B/blob/main/results_2024-02-16T18-38-39.552249.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6389813616730637,\n \"acc_stderr\": 0.032241943281319616,\n \"acc_norm\": 0.6417845405458356,\n \"acc_norm_stderr\": 0.03288190466152383,\n \"mc1\": 0.3598531211750306,\n \"mc1_stderr\": 0.01680186046667716,\n \"mc2\": 0.5229369986337806,\n \"mc2_stderr\": 0.01525073227946668\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6126279863481229,\n \"acc_stderr\": 0.014235872487909869,\n \"acc_norm\": 0.6484641638225256,\n \"acc_norm_stderr\": 0.013952413699600931\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6519617606054571,\n \"acc_stderr\": 0.004753746951620152,\n \"acc_norm\": 0.8428599880501892,\n \"acc_norm_stderr\": 0.003631889496122536\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.02854479331905533,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.02854479331905533\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.032250781083062896,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.032250781083062896\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.028057791672989017,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.028057791672989017\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768776,\n \"acc_norm\": 0.8911917098445595,\n 
\"acc_norm_stderr\": 0.022473253332768776\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6153846153846154,\n \"acc_stderr\": 0.024666744915187208,\n \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.024666744915187208\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.02803792996911499,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.02803792996911499\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.02812597226565437,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.02812597226565437\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.030636591348699803,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.030636591348699803\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.039578354719809805,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.039578354719809805\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n \"acc_stderr\": 0.013428186370608306,\n \"acc_norm\": 0.8301404853128991,\n \"acc_norm_stderr\": 0.013428186370608306\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.02425790170532338,\n \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.02425790170532338\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3094972067039106,\n \"acc_stderr\": 0.015461169002371542,\n \"acc_norm\": 0.3094972067039106,\n \"acc_norm_stderr\": 0.015461169002371542\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824782,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824782\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n \"acc_stderr\": 0.026385273703464485,\n \"acc_norm\": 0.684887459807074,\n \"acc_norm_stderr\": 0.026385273703464485\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462937,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462937\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.018975427920507215,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.018975427920507215\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786845,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786845\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3598531211750306,\n \"mc1_stderr\": 0.01680186046667716,\n \"mc2\": 0.5229369986337806,\n \"mc2_stderr\": 0.01525073227946668\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7797947908445146,\n \"acc_stderr\": 0.011646276755089688\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.5595147839272175,\n \"acc_stderr\": 0.01367457213169389\n }\n}\n```", "repo_url": "https://huggingface.co/shahzebnaveed/NeuralHermes-2.5-Mistral-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-38-39.552249.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-38-39.552249.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-38-39.552249.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T18-38-39.552249.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-38-39.552249.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T18_38_39.552249", "path": ["**/details_harness|winogrande|5_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T18-38-39.552249.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_16T18_38_39.552249", "path": ["results_2024-02-16T18-38-39.552249.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T18-38-39.552249.parquet"]}]}]} | 2024-02-16T18:41:22+00:00 |
e4c524dc687c18fdc77e846b9c751b80957f2429 | Izazk/prepro_train_10 | [
"region:us"
] | 2024-02-16T18:45:12+00:00 | {} | 2024-02-16T18:46:42+00:00 |
|
669112bbebb584172c66cf6b995a753ef7590be8 | rocioadlc/data2 | [
"license:apache-2.0",
"region:us"
] | 2024-02-16T18:45:47+00:00 | {"license": "apache-2.0"} | 2024-02-16T21:53:05+00:00 |
|
285e6a96cf310ce6b2cc2c5b4594e03032e9736c | lucasjca/ProcedimentosSUS2 | [
"region:us"
] | 2024-02-16T18:47:03+00:00 | {"dataset_info": {"features": [{"name": "03.01.08.002-0.wav", "dtype": "string"}, {"name": "06.04.38.006-2.wav", "dtype": "string"}, {"name": "06.04.38.001-1.wav", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 420, "num_examples": 28}], "download_size": 2022, "dataset_size": 420}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:48:27+00:00 |
|
b18777b3d9c05b5b1e97c8a35d5e6d044065b4c6 | sakkke/text-to-command-gemini | [
"license:mit",
"region:us"
] | 2024-02-16T18:49:14+00:00 | {"license": "mit"} | 2024-02-16T18:49:52+00:00 |
|
5855016b354dbd056220770b9cb2e5d4d08374f4 | DanielPFlorian/github-issues | [
"region:us"
] | 2024-02-16T18:53:35+00:00 | {"dataset_info": {"features": [{"name": "url", "dtype": "string"}, {"name": "repository_url", "dtype": "string"}, {"name": "labels_url", "dtype": "string"}, {"name": "comments_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "number", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "user", "struct": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "labels", "list": [{"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "name", "dtype": "string"}, {"name": "color", "dtype": "string"}, {"name": "default", "dtype": "bool"}, {"name": "description", "dtype": "string"}]}, {"name": "state", "dtype": "string"}, {"name": "locked", "dtype": "bool"}, {"name": "assignee", "struct": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "assignees", "list": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "comments", "sequence": "string"}, {"name": "created_at", "dtype": "timestamp[s]"}, {"name": "updated_at", "dtype": "timestamp[s]"}, {"name": "closed_at", "dtype": "timestamp[s]"}, {"name": "author_association", "dtype": "string"}, {"name": "draft", "dtype": "bool"}, {"name": "pull_request", 
"struct": [{"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "diff_url", "dtype": "string"}, {"name": "patch_url", "dtype": "string"}, {"name": "merged_at", "dtype": "timestamp[s]"}]}, {"name": "body", "dtype": "string"}, {"name": "reactions", "struct": [{"name": "url", "dtype": "string"}, {"name": "total_count", "dtype": "int64"}, {"name": "+1", "dtype": "int64"}, {"name": "-1", "dtype": "int64"}, {"name": "laugh", "dtype": "int64"}, {"name": "hooray", "dtype": "int64"}, {"name": "confused", "dtype": "int64"}, {"name": "heart", "dtype": "int64"}, {"name": "rocket", "dtype": "int64"}, {"name": "eyes", "dtype": "int64"}]}, {"name": "timeline_url", "dtype": "string"}, {"name": "state_reason", "dtype": "string"}, {"name": "is_pull_request", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 35285328, "num_examples": 6630}], "download_size": 10543429, "dataset_size": 35285328}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T18:53:45+00:00 |
|
8794a9f0eafdf3a64051fdafe15c98b1cc470ca9 | RafiBrent/BC_Data | [
"region:us"
] | 2024-02-16T18:59:22+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "ADI", "1": "BACK", "2": "DEB", "3": "LYM", "4": "MUC", "5": "MUS", "6": "NORM", "7": "STR", "8": "TUM"}}}}], "splits": [{"name": "train", "num_bytes": 13700396308.48, "num_examples": 89996}, {"name": "validation", "num_bytes": 1522935967.768, "num_examples": 10004}, {"name": "test", "num_bytes": 1093018719.36, "num_examples": 7180}], "download_size": 14369418825, "dataset_size": 16316350995.608}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T19:01:22+00:00 |
|
ec39d86e18a2a9dd1fb68a3f1b5c0b3ca7c5f2a9 | rewat7/tweet_sentiment | [
"region:us"
] | 2024-02-16T19:00:30+00:00 | {"configs": [{"config_name": "train_data", "data_files": "train.csv"}, {"config_name": "test_data", "data_files": "test.csv"}]} | 2024-02-16T19:10:39+00:00 |
|
c2f8f53466921d739173ee330d22521888bdf5f1 | lucasjca/ProcedimentosSUS3 | [
"region:us"
] | 2024-02-16T19:07:22+00:00 | {"dataset_info": {"features": [{"name": "audio", "dtype": "audio"}], "splits": [{"name": "train", "num_bytes": 41920700.0, "num_examples": 89}], "download_size": 41553598, "dataset_size": 41920700.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:09:49+00:00 |
|
e8ae08008daea65e9f7f5d100910a149996be821 | indiehackers/tenglish_wikipedia | [
"region:us"
] | 2024-02-16T19:13:33+00:00 | {"dataset_info": {"features": [{"name": "translit", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 314781915, "num_examples": 87854}], "download_size": 131325063, "dataset_size": 314781915}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:13:53+00:00 |
|
fbfe4434dc0363a87a37384feee5fd5b9d263cf6 | Dataset for ShawGPT, a fine-tuned data science YouTube comment responder.
Video link: *coming soon!* <br>
Blog link: *coming soon!* | shawhin/shawgpt-youtube-comments | [
"size_categories:n<1K",
"license:mit",
"region:us"
] | 2024-02-16T19:14:02+00:00 | {"license": "mit", "size_categories": ["n<1K"], "dataset_info": {"features": [{"name": "example", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 41700, "num_examples": 50}, {"name": "test", "num_bytes": 7489, "num_examples": 9}], "download_size": 27338, "dataset_size": 49189}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T20:14:38+00:00 |
a812370a8384c20a2db2c58aa83fbbcc5736248e | GGital/Signal_Test03 | [
"region:us"
] | 2024-02-16T19:16:43+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 17878235.0, "num_examples": 647}], "download_size": 17876811, "dataset_size": 17878235.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:16:49+00:00 |
|
16c22d326504ac71adab3a6f198a278858b81b33 | # Dataset Card for Academic Essay Prompt-Completion Pairs
## Dataset Description
This dataset is designed to distinguish between essays authored by students and those generated by Large Language Models (LLMs), offering an essential resource for researchers and practitioners in natural language processing, educational technology, and academic integrity. Hosted on Huggingface, it supports the development and evaluation of models aimed at identifying the origin of textual content, which is crucial for a variety of applications including enhancing automated grading systems and detecting AI-generated text in academic submissions.
### Dataset Overview
The dataset consists of prompt-completion pairs, carefully crafted to simulate real-world academic writing scenarios. Each entry is uniformly structured and encapsulated by `<s>` and `</s>` tags, ensuring a standardized format. Within these tags, the prompt is specified between `[INST]` and `[/INST]` tags and comprises the Source Text, Essay Instructions, and the Essay. The completion, positioned outside the `[INST]` and `[/INST]` tags but still within the `<s>` and `</s>` encapsulation, states the essay's origin as either "This essay was written by an actual student." or "This essay was generated by a Large Language Model." This yields a binary classification task: discerning student-written essays from machine-generated ones.
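To make this layout concrete, here is a minimal parsing sketch; the `parse_entry` helper and the sample entry string are illustrative assumptions, not part of the dataset's API.

```python
import re

def parse_entry(example: str) -> tuple[str, str]:
    # Assumes the documented layout: <s> [INST] prompt [/INST] completion </s>
    match = re.search(r"<s>\s*\[INST\](.*?)\[/INST\](.*?)</s>", example, re.DOTALL)
    if match is None:
        raise ValueError("entry does not follow the expected tag layout")
    prompt, completion = (part.strip() for part in match.groups())
    return prompt, completion

# Hypothetical entry following the structure described above:
entry = ("<s> [INST] Source Text: ... Essay Instructions: ... Essay: ... [/INST] "
         "This essay was written by an actual student. </s>")
prompt, completion = parse_entry(entry)
print(completion)  # This essay was written by an actual student.
```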
### Structure
When the dataset is loaded with the `load_dataset` function from Huggingface's `datasets` library, users will encounter two primary splits:
- **Train:** Accounts for approximately 70% of the dataset, tailored for the training of machine learning models.
- **Test:** Comprises the remaining 30%, designated for the assessment of the models' performance.
#### Fields
Each entry in the dataset is meticulously structured to include:
- **Prompt:** Located within `[INST]` and `[/INST]` tags and encapsulated by `<s>` and `</s>` tags, the prompt includes the Source Text, Essay Instructions, and the Essay.
- **Completion:** Situated outside the instructional tags yet within the `<s>` and `</s>` encapsulation, the completion provides a definitive statement regarding the essay's authorship, indicating it was either "student-written" or "machine-generated."
### Use Cases
This dataset is exceptionally suited for:
- Crafting algorithms that can autonomously distinguish between human-authored and AI-generated text.
- Reinforcing academic integrity tools by identifying submissions that may be AI-generated.
- Enhancing the capabilities of automated essay scoring systems by introducing them to a wide variety of textual origins.
- Conducting in-depth research in natural language understanding, particularly in exploring the stylistic and content-based differences between human and AI authors.
### Accessing the Dataset
To access and load the dataset into Python environments, use the following command through Huggingface's `datasets` library:
```python
from datasets import load_dataset
dataset = load_dataset("knarasi1/student_and_llm_essays")
```
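
Continuing from the snippet above, the splits can then be inspected like any other `datasets` object; this is generic `datasets` usage rather than anything specific to this dataset:

```python
print(dataset)                    # shows the train/test splits and their row counts
first_row = dataset["train"][0]   # one prompt-completion entry
print(list(first_row.keys()))     # inspect the actual column names
```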
### Acknowledgments
This dataset represents a collective endeavor to foster innovation and uphold integrity in academic writing and research. It underscores the community's dedication to improving interactions between humans and AI within educational frameworks.
### Disclaimer
Dataset users are urged to employ this resource ethically and responsibly, especially in light of its potential impact on educational and research settings. The creators of the dataset and Huggingface explicitly discourage the misuse of AI-generated text for academic dishonesty or any form of deception. | knarasi1/student_and_llm_essays | [
"region:us"
] | 2024-02-16T19:19:16+00:00 | {} | 2024-02-16T19:42:26+00:00 |
e0c6c4fdc6074df42e8af3a63f686c634a3165b9 | alisson40889/loka | [
"license:openrail",
"region:us"
] | 2024-02-16T19:26:35+00:00 | {"license": "openrail"} | 2024-02-16T19:27:32+00:00 |
|
964984dfe8f2614d2a831a7ee5b5018fb76abcf7 | # Dataset Card for TooT-PLM-ionCT_DB
<!-- Provide a quick summary of the dataset. -->
These datasets have been used in the TooT-PLM-ionCT tool, a composite framework consisting of three distinct systems, each with a different architecture and trained on a unique dataset. Each system within TooT-PLM-ionCT is dedicated to a specific task: segregating ion channels (ICs) and ion transporters (ITs) from other membrane proteins, and differentiating ICs from ITs.
- **Curated by:** Hamed Ghazikhani
### Dataset Sources
<!-- Provide the basic links for the dataset. -->
- **Repository:** UniProt/SwissProt
- **Paper:** https://www.biorxiv.org/content/10.1101/2023.07.11.548644v1.abstract
- **Demo:** https://tootsuite.encs.concordia.ca/toot_plm_ionct
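
The card itself does not include a loading snippet; assuming the repository follows the standard `datasets` layout, a minimal sketch would be:

```python
from datasets import load_dataset

# Hypothetical usage; available configurations and splits depend on the repository layout.
dataset = load_dataset("ghazikhanihamed/TooT-PLM-ionCT_DB")
print(dataset)
```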
## Citation
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
@misc{ghazikhani_exploiting_2023,
title = {Exploiting protein language models for the precise classification of ion channels and ion transporters},
copyright = {© 2023, Posted by Cold Spring Harbor Laboratory. This pre-print is available under a Creative Commons License (Attribution 4.0 International), CC BY 4.0, as described at http://creativecommons.org/licenses/by/4.0/},
url = {https://www.biorxiv.org/content/10.1101/2023.07.11.548644v1},
doi = {10.1101/2023.07.11.548644},
abstract = {This study presents TooT-PLM-ionCT, a composite framework consisting of three distinct systems, each with different architectures and trained on unique datasets. Each system within TooT-PLM-ionCT is dedicated to a specific task: segregating ion channels (ICs) and ion transporters (ITs) from other membrane proteins and differentiating ICs from ITs. These systems exploit the capabilities of six diverse Protein Language Models (PLMs) - ProtBERT, ProtBERT-BFD, ESM-1b, ESM-2 (650M parameters), and ESM-2 (15B parameters). As these proteins play a pivotal role in the regulation of ion movement across cellular membranes, they are integral to numerous biological processes and overall cellular vitality. To circumvent the costly and time-consuming nature of wet lab experiments, we harness the predictive prowess of PLMs, drawing parallels with techniques in natural language processing. Our strategy engages six classifiers, embracing both conventional methodologies and a deep learning model, for each of our defined tasks. Furthermore, we delve into critical factors influencing our tasks, including the implications of dataset balancing, the effect of frozen versus fine-tuned PLM representations, and the potential variance between half and full precision floating-point computations. Our empirical results showcase superior performance in distinguishing ITs from other membrane proteins and differentiating ICs from ITs, while the task of discriminating ICs from other membrane proteins exhibits results commensurate with the current state-of-the-art.},
language = {en},
urldate = {2023-07-31},
publisher = {bioRxiv},
author = {Ghazikhani, Hamed and Butler, Gregory},
month = jul,
year = {2023},
note = {Pages: 2023.07.11.548644
Section: New Results},
}
| ghazikhanihamed/TooT-PLM-ionCT_DB | [
"task_categories:text-classification",
"license:mit",
"region:us"
] | 2024-02-16T19:40:46+00:00 | {"license": "mit", "task_categories": ["text-classification"]} | 2024-02-16T19:53:27+00:00 |
9e312099a8e5031f2a22df917a93b589cae578b6 |
# Dataset Card for Evaluation run of DatPySci/pythia-1b-dpo-full
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [DatPySci/pythia-1b-dpo-full](https://huggingface.co/DatPySci/pythia-1b-dpo-full) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full",
"harness_winogrande_5",
split="train")
```
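
The aggregated metrics can be pulled the same way through the "results" configuration; a small sketch, assuming the "latest" split layout described above:

```python
from datasets import load_dataset

# Aggregated metrics for the run; "latest" always tracks the newest results.
results = load_dataset(
    "open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full",
    "results",
    split="latest",
)
```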
## Latest results
These are the [latest results from run 2024-02-16T19:40:16.291334](https://huggingface.co/datasets/open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full/blob/main/results_2024-02-16T19-40-16.291334.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each eval's results in its own configuration, with the "latest" split pointing to the most recent run):
```python
{
"all": {
"acc": 0.24547450110873706,
"acc_stderr": 0.030321295510317755,
"acc_norm": 0.24643856591402344,
"acc_norm_stderr": 0.03105260906412087,
"mc1": 0.2252141982864137,
"mc1_stderr": 0.014623240768023496,
"mc2": 0.3726747548681276,
"mc2_stderr": 0.014362441702987668
},
"harness|arc:challenge|25": {
"acc": 0.2781569965870307,
"acc_stderr": 0.013094469919538814,
"acc_norm": 0.29436860068259385,
"acc_norm_stderr": 0.013318528460539424
},
"harness|hellaswag|10": {
"acc": 0.38657637920732923,
"acc_stderr": 0.004859699562451461,
"acc_norm": 0.4903405696076479,
"acc_norm_stderr": 0.004988850185477489
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.03633384414073465,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.03633384414073465
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.16447368421052633,
"acc_stderr": 0.0301675334686327,
"acc_norm": 0.16447368421052633,
"acc_norm_stderr": 0.0301675334686327
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.17,
"acc_stderr": 0.03775251680686371,
"acc_norm": 0.17,
"acc_norm_stderr": 0.03775251680686371
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.27169811320754716,
"acc_stderr": 0.02737770662467071,
"acc_norm": 0.27169811320754716,
"acc_norm_stderr": 0.02737770662467071
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.034765901043041336,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.034765901043041336
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.19,
"acc_stderr": 0.039427724440366255,
"acc_norm": 0.19,
"acc_norm_stderr": 0.039427724440366255
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.23,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165065
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171452,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171452
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.027678452578212394,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.027678452578212394
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281337,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281337
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.1724137931034483,
"acc_stderr": 0.031478307902595745,
"acc_norm": 0.1724137931034483,
"acc_norm_stderr": 0.031478307902595745
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2566137566137566,
"acc_stderr": 0.022494510767503154,
"acc_norm": 0.2566137566137566,
"acc_norm_stderr": 0.022494510767503154
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.03893259610604673,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.03893259610604673
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.23548387096774193,
"acc_stderr": 0.02413763242933771,
"acc_norm": 0.23548387096774193,
"acc_norm_stderr": 0.02413763242933771
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.20689655172413793,
"acc_stderr": 0.028501378167893946,
"acc_norm": 0.20689655172413793,
"acc_norm_stderr": 0.028501378167893946
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.18787878787878787,
"acc_stderr": 0.03050193405942914,
"acc_norm": 0.18787878787878787,
"acc_norm_stderr": 0.03050193405942914
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.17676767676767677,
"acc_stderr": 0.027178752639044915,
"acc_norm": 0.17676767676767677,
"acc_norm_stderr": 0.027178752639044915
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.21761658031088082,
"acc_stderr": 0.029778663037752947,
"acc_norm": 0.21761658031088082,
"acc_norm_stderr": 0.029778663037752947
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.21025641025641026,
"acc_stderr": 0.020660597485026924,
"acc_norm": 0.21025641025641026,
"acc_norm_stderr": 0.020660597485026924
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.02730914058823019,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.02730914058823019
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.21008403361344538,
"acc_stderr": 0.026461398717471874,
"acc_norm": 0.21008403361344538,
"acc_norm_stderr": 0.026461398717471874
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.25165562913907286,
"acc_stderr": 0.03543304234389985,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.03543304234389985
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.29724770642201837,
"acc_stderr": 0.01959570722464353,
"acc_norm": 0.29724770642201837,
"acc_norm_stderr": 0.01959570722464353
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.375,
"acc_stderr": 0.033016908987210894,
"acc_norm": 0.375,
"acc_norm_stderr": 0.033016908987210894
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.027865942286639325,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.027865942286639325
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.2742616033755274,
"acc_stderr": 0.029041333510598018,
"acc_norm": 0.2742616033755274,
"acc_norm_stderr": 0.029041333510598018
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.3004484304932735,
"acc_stderr": 0.030769352008229143,
"acc_norm": 0.3004484304932735,
"acc_norm_stderr": 0.030769352008229143
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.22900763358778625,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.22900763358778625,
"acc_norm_stderr": 0.036853466317118506
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2396694214876033,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070416
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.04133119440243839
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.2331288343558282,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.2331288343558282,
"acc_norm_stderr": 0.0332201579577674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04287858751340456,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340456
},
"harness|hendrycksTest-management|5": {
"acc": 0.1941747572815534,
"acc_stderr": 0.03916667762822585,
"acc_norm": 0.1941747572815534,
"acc_norm_stderr": 0.03916667762822585
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.2948717948717949,
"acc_stderr": 0.029872577708891162,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.029872577708891162
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542129,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542129
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.280970625798212,
"acc_stderr": 0.016073127851221235,
"acc_norm": 0.280970625798212,
"acc_norm_stderr": 0.016073127851221235
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.023267528432100174,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.023267528432100174
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24692737430167597,
"acc_stderr": 0.014422292204808835,
"acc_norm": 0.24692737430167597,
"acc_norm_stderr": 0.014422292204808835
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.023929155517351284,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.023929155517351284
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.26688102893890675,
"acc_stderr": 0.02512263760881665,
"acc_norm": 0.26688102893890675,
"acc_norm_stderr": 0.02512263760881665
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.2716049382716049,
"acc_stderr": 0.02474862449053737,
"acc_norm": 0.2716049382716049,
"acc_norm_stderr": 0.02474862449053737
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2375886524822695,
"acc_stderr": 0.025389512552729903,
"acc_norm": 0.2375886524822695,
"acc_norm_stderr": 0.025389512552729903
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.23989569752281617,
"acc_stderr": 0.010906282617981648,
"acc_norm": 0.23989569752281617,
"acc_norm_stderr": 0.010906282617981648
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.2757352941176471,
"acc_stderr": 0.027146271936625162,
"acc_norm": 0.2757352941176471,
"acc_norm_stderr": 0.027146271936625162
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.25326797385620914,
"acc_stderr": 0.017593486895366835,
"acc_norm": 0.25326797385620914,
"acc_norm_stderr": 0.017593486895366835
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721378,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721378
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.17959183673469387,
"acc_stderr": 0.024573293589585637,
"acc_norm": 0.17959183673469387,
"acc_norm_stderr": 0.024573293589585637
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.263681592039801,
"acc_stderr": 0.031157150869355554,
"acc_norm": 0.263681592039801,
"acc_norm_stderr": 0.031157150869355554
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"harness|hendrycksTest-virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.0332939411907353,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.0332939411907353
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.1871345029239766,
"acc_stderr": 0.02991312723236805,
"acc_norm": 0.1871345029239766,
"acc_norm_stderr": 0.02991312723236805
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2252141982864137,
"mc1_stderr": 0.014623240768023496,
"mc2": 0.3726747548681276,
"mc2_stderr": 0.014362441702987668
},
"harness|winogrande|5": {
"acc": 0.5343330702446725,
"acc_stderr": 0.014019317531542565
},
"harness|gsm8k|5": {
"acc": 0.019711902956785442,
"acc_stderr": 0.0038289829787357082
}
}
```
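
For a quick sanity check, the per-task entries above can be aggregated directly. A minimal sketch, assuming the linked results file keys this dict under a top-level "results" field (an assumption about the file layout):

```python
import json

# Assumption: the downloaded file nests the dict shown above under "results".
with open("results_2024-02-16T19-40-16.291334.json") as f:
    results = json.load(f)["results"]

# Average the 57 MMLU (hendrycksTest) task accuracies.
mmlu = [v["acc"] for k, v in results.items()
        if k.startswith("harness|hendrycksTest-")]
print(f"MMLU mean acc over {len(mmlu)} tasks: {sum(mmlu) / len(mmlu):.4f}")
```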
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full | [
"region:us"
] | 2024-02-16T19:41:59+00:00 | {"pretty_name": "Evaluation run of DatPySci/pythia-1b-dpo-full", "dataset_summary": "Dataset automatically created during the evaluation run of model [DatPySci/pythia-1b-dpo-full](https://huggingface.co/DatPySci/pythia-1b-dpo-full) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T19:40:16.291334](https://huggingface.co/datasets/open-llm-leaderboard/details_DatPySci__pythia-1b-dpo-full/blob/main/results_2024-02-16T19-40-16.291334.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24547450110873706,\n \"acc_stderr\": 0.030321295510317755,\n \"acc_norm\": 0.24643856591402344,\n \"acc_norm_stderr\": 0.03105260906412087,\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023496,\n \"mc2\": 0.3726747548681276,\n \"mc2_stderr\": 0.014362441702987668\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2781569965870307,\n \"acc_stderr\": 0.013094469919538814,\n \"acc_norm\": 0.29436860068259385,\n \"acc_norm_stderr\": 0.013318528460539424\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.38657637920732923,\n \"acc_stderr\": 0.004859699562451461,\n \"acc_norm\": 0.4903405696076479,\n \"acc_norm_stderr\": 0.004988850185477489\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.03633384414073465,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.03633384414073465\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.16447368421052633,\n \"acc_stderr\": 0.0301675334686327,\n \"acc_norm\": 0.16447368421052633,\n \"acc_norm_stderr\": 0.0301675334686327\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.27169811320754716,\n \"acc_stderr\": 0.02737770662467071,\n \"acc_norm\": 0.27169811320754716,\n \"acc_norm_stderr\": 0.02737770662467071\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.034765901043041336,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.034765901043041336\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 
0.039427724440366255,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171452,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171452\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.027678452578212394,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.027678452578212394\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.03999423879281337,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.03999423879281337\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.1724137931034483,\n \"acc_stderr\": 0.031478307902595745,\n \"acc_norm\": 0.1724137931034483,\n \"acc_norm_stderr\": 0.031478307902595745\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.03893259610604673,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.03893259610604673\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.23548387096774193,\n \"acc_stderr\": 0.02413763242933771,\n \"acc_norm\": 0.23548387096774193,\n \"acc_norm_stderr\": 0.02413763242933771\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.20689655172413793,\n \"acc_stderr\": 0.028501378167893946,\n \"acc_norm\": 0.20689655172413793,\n \"acc_norm_stderr\": 0.028501378167893946\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.18787878787878787,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.18787878787878787,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.029778663037752947,\n \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752947\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.21025641025641026,\n \"acc_stderr\": 0.020660597485026924,\n \"acc_norm\": 0.21025641025641026,\n \"acc_norm_stderr\": 0.020660597485026924\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02730914058823019,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02730914058823019\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.25165562913907286,\n \"acc_stderr\": 0.03543304234389985,\n \"acc_norm\": 0.25165562913907286,\n \"acc_norm_stderr\": 0.03543304234389985\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.29724770642201837,\n \"acc_stderr\": 0.01959570722464353,\n \"acc_norm\": 0.29724770642201837,\n \"acc_norm_stderr\": 0.01959570722464353\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.027865942286639325,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.027865942286639325\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598018,\n \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598018\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3004484304932735,\n \"acc_stderr\": 0.030769352008229143,\n \"acc_norm\": 0.3004484304932735,\n \"acc_norm_stderr\": 0.030769352008229143\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2331288343558282,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.2331288343558282,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822585,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822585\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2948717948717949,\n \"acc_stderr\": 0.029872577708891162,\n \"acc_norm\": 0.2948717948717949,\n \"acc_norm_stderr\": 0.029872577708891162\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.280970625798212,\n \"acc_stderr\": 0.016073127851221235,\n \"acc_norm\": 0.280970625798212,\n \"acc_norm_stderr\": 0.016073127851221235\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.26688102893890675,\n \"acc_stderr\": 0.02512263760881665,\n \"acc_norm\": 0.26688102893890675,\n \"acc_norm_stderr\": 0.02512263760881665\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2716049382716049,\n \"acc_stderr\": 0.02474862449053737,\n \"acc_norm\": 0.2716049382716049,\n \"acc_norm_stderr\": 0.02474862449053737\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2375886524822695,\n \"acc_stderr\": 0.025389512552729903,\n \"acc_norm\": 0.2375886524822695,\n \"acc_norm_stderr\": 0.025389512552729903\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23989569752281617,\n \"acc_stderr\": 0.010906282617981648,\n \"acc_norm\": 0.23989569752281617,\n \"acc_norm_stderr\": 0.010906282617981648\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2757352941176471,\n \"acc_stderr\": 0.027146271936625162,\n \"acc_norm\": 0.2757352941176471,\n \"acc_norm_stderr\": 0.027146271936625162\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25326797385620914,\n \"acc_stderr\": 0.017593486895366835,\n \"acc_norm\": 0.25326797385620914,\n \"acc_norm_stderr\": 0.017593486895366835\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721378,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721378\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.17959183673469387,\n \"acc_stderr\": 0.024573293589585637,\n \"acc_norm\": 0.17959183673469387,\n \"acc_norm_stderr\": 0.024573293589585637\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.263681592039801,\n \"acc_stderr\": 0.031157150869355554,\n \"acc_norm\": 0.263681592039801,\n \"acc_norm_stderr\": 0.031157150869355554\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.24096385542168675,\n \"acc_stderr\": 0.0332939411907353,\n \"acc_norm\": 0.24096385542168675,\n \"acc_norm_stderr\": 0.0332939411907353\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.1871345029239766,\n \"acc_stderr\": 0.02991312723236805,\n \"acc_norm\": 0.1871345029239766,\n \"acc_norm_stderr\": 0.02991312723236805\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023496,\n \"mc2\": 0.3726747548681276,\n \"mc2_stderr\": 0.014362441702987668\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5343330702446725,\n \"acc_stderr\": 0.014019317531542565\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.019711902956785442,\n \"acc_stderr\": 
0.0038289829787357082\n }\n}\n```", "repo_url": "https://huggingface.co/DatPySci/pythia-1b-dpo-full", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|arc:challenge|25_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|gsm8k|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hellaswag|10_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T19-40-16.291334.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T19-40-16.291334.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T19-40-16.291334.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T19-40-16.291334.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T19-40-16.291334.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T19_40_16.291334", "path": ["**/details_harness|winogrande|5_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T19-40-16.291334.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_16T19_40_16.291334", "path": ["results_2024-02-16T19-40-16.291334.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T19-40-16.291334.parquet"]}]}]} | 2024-02-16T19:42:23+00:00 |
a9280f41438e3033063558104993e176af2f08fe | dzjxzyd/rhea_uniprot_reaction_large_super | [
"region:us"
] | 2024-02-16T19:42:09+00:00 | {} | 2024-02-16T19:57:24+00:00 |
|
bc74750063e36322398f9dbf3af7d762c3253951 | Yeerchiu/mmm_lmd_8bars_new | [
"region:us"
] | 2024-02-16T19:44:58+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3140821056, "num_examples": 177567}], "download_size": 490285218, "dataset_size": 3140821056}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:45:46+00:00 |
|
570aec4096bbf95de5b7d85f28a0faf591554e09 | WIP
| manestay/borderlines | [
"region:us"
] | 2024-02-16T19:46:26+00:00 | {"configs": [{"config_name": "territories", "data_files": "disputed_territories.csv"}, {"config_name": "countries", "data_files": "countries_info.csv"}, {"config_name": "queries", "data_files": [{"split": "iw", "path": "queries/iw.csv"}, {"split": "sq", "path": "queries/sq.csv"}, {"split": "ar", "path": "queries/ar.csv"}, {"split": "az", "path": "queries/az.csv"}, {"split": "bn", "path": "queries/bn.csv"}, {"split": "bs", "path": "queries/bs.csv"}, {"split": "da", "path": "queries/da.csv"}, {"split": "el", "path": "queries/el.csv"}, {"split": "en", "path": "queries/en.csv"}, {"split": "es", "path": "queries/es.csv"}, {"split": "fa", "path": "queries/fa.csv"}, {"split": "fr", "path": "queries/fr.csv"}, {"split": "hi", "path": "queries/hi.csv"}, {"split": "hr", "path": "queries/hr.csv"}, {"split": "ht", "path": "queries/ht.csv"}, {"split": "hy", "path": "queries/hy.csv"}, {"split": "id", "path": "queries/id.csv"}, {"split": "is", "path": "queries/is.csv"}, {"split": "it", "path": "queries/it.csv"}, {"split": "ja", "path": "queries/ja.csv"}, {"split": "ka", "path": "queries/ka.csv"}, {"split": "km", "path": "queries/km.csv"}, {"split": "ko", "path": "queries/ko.csv"}, {"split": "ky", "path": "queries/ky.csv"}, {"split": "lo", "path": "queries/lo.csv"}, {"split": "mg", "path": "queries/mg.csv"}, {"split": "mn", "path": "queries/mn.csv"}, {"split": "ms", "path": "queries/ms.csv"}, {"split": "my", "path": "queries/my.csv"}, {"split": "ne", "path": "queries/ne.csv"}, {"split": "nl", "path": "queries/nl.csv"}, {"split": "pt", "path": "queries/pt.csv"}, {"split": "ru", "path": "queries/ru.csv"}, {"split": "sl", "path": "queries/sl.csv"}, {"split": "sn", "path": "queries/sn.csv"}, {"split": "so", "path": "queries/so.csv"}, {"split": "sr", "path": "queries/sr.csv"}, {"split": "sw", "path": "queries/sw.csv"}, {"split": "tg", "path": "queries/tg.csv"}, {"split": "th", "path": "queries/th.csv"}, {"split": "ti", "path": "queries/ti.csv"}, {"split": "tl", "path": "queries/tl.csv"}, {"split": "tr", "path": "queries/tr.csv"}, {"split": "uk", "path": "queries/uk.csv"}, {"split": "ur", "path": "queries/ur.csv"}, {"split": "uz", "path": "queries/uz.csv"}, {"split": "vi", "path": "queries/vi.csv"}, {"split": "zht", "path": "queries/zh-TW.csv"}, {"split": "zhs", "path": "queries/zh-CN.csv"}]}]} | 2024-02-17T00:21:20+00:00 |
d1c8e07e37b78388ba77c700fe7f0364eebd857b | Juunge/danske-ordsprog | [
"region:us"
] | 2024-02-16T19:52:22+00:00 | {"dataset_info": {"features": [{"name": "saying", "dtype": "string"}, {"name": "meaning", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 35026, "num_examples": 400}], "download_size": 22640, "dataset_size": 35026}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:52:23+00:00 |
|
b66e00a2d988d5dc70ef2d83c707170853b78a86 | GGital/Signal_Test04 | [
"region:us"
] | 2024-02-16T19:52:54+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 23704098.0, "num_examples": 647}], "download_size": 23733105, "dataset_size": 23704098.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T19:53:00+00:00 |
|
1eb53d79c8a284f483091d3ecc86d6f16bdbf8a1 | hotal/cowrie_logs | [
"region:us"
] | 2024-02-16T19:57:31+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "command", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 38259, "num_examples": 74}], "download_size": 9909, "dataset_size": 38259}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T20:06:18+00:00 |
|
611c23222c4ea1fdebaa63aa8c797b8bfa019697 | jhaberbe/lipid-droplets | [
"license:mit",
"region:us"
] | 2024-02-16T20:00:45+00:00 | {"license": "mit", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "pixel_values", "dtype": "image"}, {"name": "labels", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 17759568.0, "num_examples": 16}, {"name": "validation", "num_bytes": 16724901.0, "num_examples": 15}], "download_size": 34507742, "dataset_size": 34484469.0}} | 2024-02-16T20:33:28+00:00 |
|
901e017600b5599f1ef26be5e49cc9c71a40443b | # Dataset Card for "lipid_droplets"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | jhaberbe/lipid_droplets | [
"region:us"
] | 2024-02-16T20:04:21+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 17759568.0, "num_examples": 16}, {"name": "validation", "num_bytes": 16724901.0, "num_examples": 15}], "download_size": 0, "dataset_size": 34484469.0}} | 2024-02-16T20:15:14+00:00 |
845b6387615963d13d86bca1990f0e3d7f8b0429 | geovanezzz/vozviniboy | [
"license:openrail",
"region:us"
] | 2024-02-16T20:05:27+00:00 | {"license": "openrail"} | 2024-02-16T20:05:39+00:00 |
|
d314e9a00e210848c69bb8c647b14f0154678296 |
# Dataset Card for Evaluation run of chasedreaminf/Dream-7B-slerp
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [chasedreaminf/Dream-7B-slerp](https://huggingface.co/chasedreaminf/Dream-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp",
"harness_winogrande_5",
split="train")
```
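The aggregated numbers live in the additional "results" configuration mentioned above. As a quick illustration (not part of the auto-generated card), it can be loaded the same way; the config and split names below are the ones declared in this card's metadata:

```python
from datasets import load_dataset

# "latest" always resolves to the most recent results parquet for this run.
results = load_dataset(
    "open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp",
    "results",
    split="latest",
)
print(results)
```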
## Latest results
These are the [latest results from run 2024-02-16T20:05:27.586603](https://huggingface.co/datasets/open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp/blob/main/results_2024-02-16T20-05-27.586603.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6507213038118388,
"acc_stderr": 0.03214268781825063,
"acc_norm": 0.6503644948036131,
"acc_norm_stderr": 0.03280953874341137,
"mc1": 0.4724602203182375,
"mc1_stderr": 0.017476930190712187,
"mc2": 0.6184735043628918,
"mc2_stderr": 0.015107906651203224
},
"harness|arc:challenge|25": {
"acc": 0.6663822525597269,
"acc_stderr": 0.013778687054176538,
"acc_norm": 0.6851535836177475,
"acc_norm_stderr": 0.013572657703084948
},
"harness|hellaswag|10": {
"acc": 0.6787492531368253,
"acc_stderr": 0.004660025270817022,
"acc_norm": 0.8634734116709819,
"acc_norm_stderr": 0.0034264517445078474
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6296296296296297,
"acc_stderr": 0.041716541613545426,
"acc_norm": 0.6296296296296297,
"acc_norm_stderr": 0.041716541613545426
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7056603773584905,
"acc_stderr": 0.02804918631569525,
"acc_norm": 0.7056603773584905,
"acc_norm_stderr": 0.02804918631569525
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.03586879280080341
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.45098039215686275,
"acc_stderr": 0.049512182523962625,
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.049512182523962625
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5957446808510638,
"acc_stderr": 0.032081157507886836,
"acc_norm": 0.5957446808510638,
"acc_norm_stderr": 0.032081157507886836
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5,
"acc_stderr": 0.047036043419179864,
"acc_norm": 0.5,
"acc_norm_stderr": 0.047036043419179864
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42328042328042326,
"acc_stderr": 0.02544636563440679,
"acc_norm": 0.42328042328042326,
"acc_norm_stderr": 0.02544636563440679
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.0442626668137991,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.0442626668137991
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.0328766675860349,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.0328766675860349
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.797979797979798,
"acc_stderr": 0.02860620428922987,
"acc_norm": 0.797979797979798,
"acc_norm_stderr": 0.02860620428922987
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8911917098445595,
"acc_stderr": 0.022473253332768766,
"acc_norm": 0.8911917098445595,
"acc_norm_stderr": 0.022473253332768766
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6615384615384615,
"acc_stderr": 0.023991500500313036,
"acc_norm": 0.6615384615384615,
"acc_norm_stderr": 0.023991500500313036
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.029045600290616255,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.029045600290616255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8458715596330275,
"acc_stderr": 0.015480826865374308,
"acc_norm": 0.8458715596330275,
"acc_norm_stderr": 0.015480826865374308
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5416666666666666,
"acc_stderr": 0.03398110890294636,
"acc_norm": 0.5416666666666666,
"acc_norm_stderr": 0.03398110890294636
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8088235294117647,
"acc_stderr": 0.027599174300640766,
"acc_norm": 0.8088235294117647,
"acc_norm_stderr": 0.027599174300640766
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7932489451476793,
"acc_stderr": 0.0263616516683891,
"acc_norm": 0.7932489451476793,
"acc_norm_stderr": 0.0263616516683891
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6860986547085202,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.6860986547085202,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7786259541984732,
"acc_stderr": 0.0364129708131373,
"acc_norm": 0.7786259541984732,
"acc_norm_stderr": 0.0364129708131373
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7730061349693251,
"acc_stderr": 0.03291099578615769,
"acc_norm": 0.7730061349693251,
"acc_norm_stderr": 0.03291099578615769
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4732142857142857,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.4732142857142857,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.02126271940040697,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.02126271940040697
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8250319284802043,
"acc_stderr": 0.01358661921990334,
"acc_norm": 0.8250319284802043,
"acc_norm_stderr": 0.01358661921990334
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7485549132947977,
"acc_stderr": 0.02335736578587403,
"acc_norm": 0.7485549132947977,
"acc_norm_stderr": 0.02335736578587403
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.42905027932960893,
"acc_stderr": 0.016553287863116037,
"acc_norm": 0.42905027932960893,
"acc_norm_stderr": 0.016553287863116037
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7320261437908496,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.7320261437908496,
"acc_norm_stderr": 0.025360603796242557
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7106109324758842,
"acc_stderr": 0.025755865922632945,
"acc_norm": 0.7106109324758842,
"acc_norm_stderr": 0.025755865922632945
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7530864197530864,
"acc_stderr": 0.02399350170904211,
"acc_norm": 0.7530864197530864,
"acc_norm_stderr": 0.02399350170904211
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.475177304964539,
"acc_stderr": 0.029790719243829727,
"acc_norm": 0.475177304964539,
"acc_norm_stderr": 0.029790719243829727
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4576271186440678,
"acc_stderr": 0.012724296550980188,
"acc_norm": 0.4576271186440678,
"acc_norm_stderr": 0.012724296550980188
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6727941176470589,
"acc_stderr": 0.028501452860396556,
"acc_norm": 0.6727941176470589,
"acc_norm_stderr": 0.028501452860396556
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6683006535947712,
"acc_stderr": 0.01904748523936038,
"acc_norm": 0.6683006535947712,
"acc_norm_stderr": 0.01904748523936038
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6818181818181818,
"acc_stderr": 0.04461272175910509,
"acc_norm": 0.6818181818181818,
"acc_norm_stderr": 0.04461272175910509
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7428571428571429,
"acc_stderr": 0.02797982353874455,
"acc_norm": 0.7428571428571429,
"acc_norm_stderr": 0.02797982353874455
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.02619392354445412,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.02619392354445412
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.83,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.83,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8128654970760234,
"acc_stderr": 0.02991312723236804,
"acc_norm": 0.8128654970760234,
"acc_norm_stderr": 0.02991312723236804
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4724602203182375,
"mc1_stderr": 0.017476930190712187,
"mc2": 0.6184735043628918,
"mc2_stderr": 0.015107906651203224
},
"harness|winogrande|5": {
"acc": 0.8034727703235991,
"acc_stderr": 0.011168120593569562
},
"harness|gsm8k|5": {
"acc": 0.7225170583775588,
"acc_stderr": 0.01233344758104754
}
}
```
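For readers who want to recompute headline numbers from this file, here is a minimal sketch (an illustration, not part of the auto-generated card). It assumes the usual layout of these leaderboard result files, where the per-task scores may sit under a "results" key; the filename is the one linked above:

```python
import json

from huggingface_hub import hf_hub_download

# Download the raw results JSON linked above from the dataset repo.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp",
    filename="results_2024-02-16T20-05-27.586603.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# Assumption: harness result files often nest scores under "results";
# fall back to the top level if this file is already the bare dict shown above.
scores = data.get("results", data)
mmlu_acc = [task["acc"] for name, task in scores.items()
            if name.startswith("harness|hendrycksTest")]
print(f"{len(mmlu_acc)} MMLU subtasks, mean acc = {sum(mmlu_acc) / len(mmlu_acc):.4f}")
```

With the values shown above, this average only approximates the "all" accuracy, since "all" also folds in the ARC, HellaSwag, Winogrande and GSM8K scores.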
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp | [
"region:us"
] | 2024-02-16T20:07:47+00:00 | {"pretty_name": "Evaluation run of chasedreaminf/Dream-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [chasedreaminf/Dream-7B-slerp](https://huggingface.co/chasedreaminf/Dream-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T20:05:27.586603](https://huggingface.co/datasets/open-llm-leaderboard/details_chasedreaminf__Dream-7B-slerp/blob/main/results_2024-02-16T20-05-27.586603.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6507213038118388,\n \"acc_stderr\": 0.03214268781825063,\n \"acc_norm\": 0.6503644948036131,\n \"acc_norm_stderr\": 0.03280953874341137,\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6184735043628918,\n \"mc2_stderr\": 0.015107906651203224\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6663822525597269,\n \"acc_stderr\": 0.013778687054176538,\n \"acc_norm\": 0.6851535836177475,\n \"acc_norm_stderr\": 0.013572657703084948\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6787492531368253,\n \"acc_stderr\": 0.004660025270817022,\n \"acc_norm\": 0.8634734116709819,\n \"acc_norm_stderr\": 0.0034264517445078474\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 
0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.049512182523962625,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.049512182523962625\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.032081157507886836,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.032081157507886836\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.02544636563440679,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.02544636563440679\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768766,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768766\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.0364129708131373,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.0364129708131373\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.02126271940040697,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.02126271940040697\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.01358661921990334,\n 
\"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.01358661921990334\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.02335736578587403,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.02335736578587403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116037,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116037\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4576271186440678,\n \"acc_stderr\": 0.012724296550980188,\n \"acc_norm\": 0.4576271186440678,\n \"acc_norm_stderr\": 0.012724296550980188\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.02619392354445412,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.02619392354445412\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6184735043628918,\n \"mc2_stderr\": 0.015107906651203224\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8034727703235991,\n \"acc_stderr\": 0.011168120593569562\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7225170583775588,\n \"acc_stderr\": 0.01233344758104754\n }\n}\n```", "repo_url": 
"https://huggingface.co/chasedreaminf/Dream-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|arc:challenge|25_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|gsm8k|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hellaswag|10_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T20-05-27.586603.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T20-05-27.586603.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T20-05-27.586603.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T20-05-27.586603.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T20-05-27.586603.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T20-05-27.586603.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["**/details_harness|winogrande|5_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T20-05-27.586603.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T20_05_27.586603", "path": ["results_2024-02-16T20-05-27.586603.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T20-05-27.586603.parquet"]}]}]} | 2024-02-16T20:08:11+00:00 |
85dcc431554f48343a29bbb35fd6617a1bd0fdb6 | # Dataset Card for "test_el_talar"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | ganader-ai-developers/test_el_talar | [
"region:us"
] | 2024-02-16T20:09:09+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "file_name", "dtype": "string"}, {"name": "cow_id", "dtype": "int64"}, {"name": "weight", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "breed", "dtype": "string"}, {"name": "sex", "dtype": "string"}, {"name": "orientation", "dtype": "string"}, {"name": "internal_cow_id", "dtype": "string"}, {"name": "vertical_distance_meters", "dtype": "float64"}, {"name": "horizontal_distance_meters", "dtype": "float64"}, {"name": "picture_quality", "dtype": "string"}, {"name": "id", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 595981, "num_examples": 2999}], "download_size": 41295, "dataset_size": 595981}} | 2024-02-16T20:09:12+00:00 |
0994b0ceeb5f421883b0836717e2a61d9dca2175 | # Dataset Card for "safety-data"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | DavideG/safety-data | [
"region:us"
] | 2024-02-16T20:12:08+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}], "splits": [{"name": "train", "num_bytes": 1461769623, "num_examples": 458194}, {"name": "test", "num_bytes": 129475839, "num_examples": 41596}], "download_size": 312584599, "dataset_size": 1591245462}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:44:33+00:00 |
c3a438d2a9bd82f5e09ead41528a00ea1df7de50 | alisson40889/moreira | [
"license:openrail",
"region:us"
] | 2024-02-16T20:15:51+00:00 | {"license": "openrail"} | 2024-02-16T20:17:45+00:00 |
|
42e63c53aa060086b9371c9f3e1fd6cfe67724db | # Dataset Card for "MeltpoolsLabeled"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | MottsCoding/MeltpoolsLabeled | [
"region:us"
] | 2024-02-16T20:16:54+00:00 | {"dataset_info": {"features": [{"name": "images", "dtype": "image"}, {"name": "labels", "sequence": {"sequence": "int32"}}], "splits": [{"name": "train", "num_bytes": 51624539.0, "num_examples": 12}], "download_size": 15662464, "dataset_size": 51624539.0}} | 2024-02-16T20:30:45+00:00 |
e778255db9d748a03cf13f16dffb9cdc7bb2396a | AlisaMenekse/ErrorCategoriesBCP_50k_rows | [
"region:us"
] | 2024-02-16T20:27:47+00:00 | {} | 2024-02-16T20:33:20+00:00 |
|
e1ba44cfabd5ff2869e3379dbec03db617f60ea8 | patilkunal28/tinyllamadataset | [
"region:us"
] | 2024-02-16T20:28:31+00:00 | {} | 2024-02-16T20:29:54+00:00 |
|
9bf956e118f917bf1908f051946e03b753d30b99 |
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
This dataset is [alexandrainst/nst-da](https://huggingface.co/datasets/alexandrainst/nst-da) with sentiment pseudo labels added by [alexandrainst/da-sentiment-base](https://huggingface.co/alexandrainst/da-sentiment-base), with a filter on text length (24)
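A minimal sketch of how such pseudo labels could be produced with the `transformers` pipeline API is shown below; the transcript column name (`text`), the split name, and the exact reading of the length filter (24) are assumptions, not confirmed by this card.

```python
from datasets import load_dataset
from transformers import pipeline

# Assumption: pseudo labels were produced with a text-classification pipeline
# over the transcripts; the precise semantics of the "text length (24)" filter
# are not documented here, so a minimum-length reading is used for illustration.
sentiment = pipeline("text-classification", model="alexandrainst/da-sentiment-base")

ds = load_dataset("alexandrainst/nst-da", split="train")  # hypothetical split
ds = ds.filter(lambda ex: len(ex["text"]) >= 24)          # hypothetical filter rule
ds = ds.map(lambda ex: {"sentiment": sentiment(ex["text"])[0]["label"]})
```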
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
Be aware that the sentiment distribution is unbalanced.
Random sample of 0.1% of the dataset:

All audio is sampled at 16000 Hz.
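For downstream use, a minimal loading sketch follows; the audio column name and the split name are assumptions (only the 16000 Hz rate is stated above).

```python
from datasets import load_dataset, Audio

ds = load_dataset("overflowwwww/nst-da-sentiment-unbalanced", split="train")  # hypothetical split
# Assumption: the audio column is named "audio"; the cast enforces the stated 16000 Hz rate.
ds = ds.cast_column("audio", Audio(sampling_rate=16000))
```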
- **Curated by:** [overflowwwww](https://huggingface.co/datasets/overflowwwww)
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | overflowwwww/nst-da-sentiment-unbalanced | [
"license:cc0-1.0",
"region:us"
] | 2024-02-16T20:33:10+00:00 | {"license": "cc0-1.0"} | 2024-02-17T08:49:59+00:00 |
01713e3258097d4af8c081f526f3267e6075d6af | p-conscious/caravaggio | [
"region:us"
] | 2024-02-16T20:34:21+00:00 | {} | 2024-02-16T20:44:48+00:00 |
|
13325d6964ec511d041b9225bd9581ec6ffab903 | Tann-dev/conversation-zizi-sexting | [
"region:us"
] | 2024-02-16T20:41:29+00:00 | {"dataset_info": {"features": [{"name": "He", "dtype": "string"}, {"name": "She", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 250620, "num_examples": 2318}], "download_size": 55776, "dataset_size": 250620}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T20:41:30+00:00 |
|
060a8e98ca9c6e09d1c032f84859abf1ae4be110 | jpardue/github_datasets_issues | [
"region:us"
] | 2024-02-16T20:42:20+00:00 | {"dataset_info": {"features": [{"name": "url", "dtype": "string"}, {"name": "repository_url", "dtype": "string"}, {"name": "labels_url", "dtype": "string"}, {"name": "comments_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "number", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "user", "struct": [{"name": "avatar_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "login", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "labels", "list": [{"name": "color", "dtype": "string"}, {"name": "default", "dtype": "bool"}, {"name": "description", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "name", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "state", "dtype": "string"}, {"name": "locked", "dtype": "bool"}, {"name": "assignee", "struct": [{"name": "avatar_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "login", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "assignees", "list": [{"name": "avatar_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "login", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "milestone", "struct": [{"name": "closed_at", "dtype": "string"}, {"name": "closed_issues", "dtype": "int64"}, {"name": "created_at", "dtype": "string"}, {"name": "creator", "struct": [{"name": "avatar_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "followers_url", 
"dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "login", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "description", "dtype": "string"}, {"name": "due_on", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "labels_url", "dtype": "string"}, {"name": "node_id", "dtype": "string"}, {"name": "number", "dtype": "int64"}, {"name": "open_issues", "dtype": "int64"}, {"name": "state", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "updated_at", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "comments", "sequence": "string"}, {"name": "created_at", "dtype": "timestamp[ns, tz=UTC]"}, {"name": "updated_at", "dtype": "timestamp[ns, tz=UTC]"}, {"name": "closed_at", "dtype": "timestamp[ns, tz=UTC]"}, {"name": "author_association", "dtype": "string"}, {"name": "active_lock_reason", "dtype": "float64"}, {"name": "draft", "dtype": "float64"}, {"name": "pull_request", "struct": [{"name": "diff_url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "merged_at", "dtype": "string"}, {"name": "patch_url", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "body", "dtype": "string"}, {"name": "reactions", "struct": [{"name": "+1", "dtype": "int64"}, {"name": "-1", "dtype": "int64"}, {"name": "confused", "dtype": "int64"}, {"name": "eyes", "dtype": "int64"}, {"name": "heart", "dtype": "int64"}, {"name": "hooray", "dtype": "int64"}, {"name": "laugh", "dtype": "int64"}, {"name": "rocket", "dtype": "int64"}, {"name": "total_count", "dtype": "int64"}, {"name": "url", "dtype": "string"}]}, {"name": "timeline_url", "dtype": "string"}, {"name": "performed_via_github_app", "dtype": "float64"}, {"name": "state_reason", "dtype": "string"}, {"name": "is_pull_request", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 22999495, "num_examples": 3000}], "download_size": 6713970, "dataset_size": 22999495}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T20:42:23+00:00 |
|
f433335e2902a511c9c12def4226253d0aa6ca7d | irenesolaiman/hf_policy_materials | [
"region:us"
] | 2024-02-16T20:42:56+00:00 | {} | 2024-02-16T20:54:41+00:00 |
|
78b2cc6591af6e8904403f9b9146b69cfc0c8799 | MatsuoDochiai/DAVI | [
"license:openrail",
"region:us"
] | 2024-02-16T20:49:01+00:00 | {"license": "openrail"} | 2024-02-16T20:52:07+00:00 |
|
bea14112f6f1c730592106ff1ecdef746cfb471e | maghwa/OpenHermes-2-AR-10K-26-680k-690k | [
"region:us"
] | 2024-02-16T20:56:56+00:00 | {"dataset_info": {"features": [{"name": "language", "dtype": "null"}, {"name": "system_prompt", "dtype": "null"}, {"name": "conversations", "dtype": "string"}, {"name": "category", "dtype": "null"}, {"name": "id", "dtype": "null"}, {"name": "topic", "dtype": "null"}, {"name": "hash", "dtype": "null"}, {"name": "model_name", "dtype": "null"}, {"name": "idx", "dtype": "null"}, {"name": "skip_prompt_formatting", "dtype": "null"}, {"name": "model", "dtype": "null"}, {"name": "avatarUrl", "dtype": "null"}, {"name": "title", "dtype": "null"}, {"name": "views", "dtype": "float64"}, {"name": "source", "dtype": "string"}, {"name": "custom_instruction", "dtype": "null"}], "splits": [{"name": "train", "num_bytes": 25324996, "num_examples": 10001}], "download_size": 11501943, "dataset_size": 25324996}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T20:56:57+00:00 |
|
1c0453f5fb13260e606bb4fbc356cdfbd610e69b | Code-Refinement/utf_20_refs_pairs | [
"region:us"
] | 2024-02-16T20:58:50+00:00 | {"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 8721872, "num_examples": 3054}, {"name": "test", "num_bytes": 2158658, "num_examples": 624}], "download_size": 1992384, "dataset_size": 10880530}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T20:58:53+00:00 |
|
dd819082e978d2462d51cffa42f7dbb6b45de8dd |
# Dataset Card for Evaluation run of liminerity/ultra0
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [liminerity/ultra0](https://huggingface.co/liminerity/ultra0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_liminerity__ultra0",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-02-16T21:00:48.786495](https://huggingface.co/datasets/open-llm-leaderboard/details_liminerity__ultra0/blob/main/results_2024-02-16T21-00-48.786495.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.3398058115621321,
"acc_stderr": 0.033239808655417924,
"acc_norm": 0.3409105453426057,
"acc_norm_stderr": 0.033965935374477625,
"mc1": 0.26193390452876375,
"mc1_stderr": 0.015392118805015027,
"mc2": 0.41485075799478666,
"mc2_stderr": 0.014670252998442896
},
"harness|arc:challenge|25": {
"acc": 0.3856655290102389,
"acc_stderr": 0.01422425097325718,
"acc_norm": 0.41467576791808874,
"acc_norm_stderr": 0.014397070564409174
},
"harness|hellaswag|10": {
"acc": 0.5078669587731528,
"acc_stderr": 0.004989163747650774,
"acc_norm": 0.6802429794861581,
"acc_norm_stderr": 0.004654291661255925
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4148148148148148,
"acc_stderr": 0.04256193767901407,
"acc_norm": 0.4148148148148148,
"acc_norm_stderr": 0.04256193767901407
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.3092105263157895,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.3092105263157895,
"acc_norm_stderr": 0.037610708698674805
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.3584905660377358,
"acc_stderr": 0.02951470358398176,
"acc_norm": 0.3584905660377358,
"acc_norm_stderr": 0.02951470358398176
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2638888888888889,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.03685651095897532
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.3352601156069364,
"acc_stderr": 0.03599586301247078,
"acc_norm": 0.3352601156069364,
"acc_norm_stderr": 0.03599586301247078
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.10784313725490197,
"acc_stderr": 0.03086428212206013,
"acc_norm": 0.10784313725490197,
"acc_norm_stderr": 0.03086428212206013
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.251063829787234,
"acc_stderr": 0.028346963777162466,
"acc_norm": 0.251063829787234,
"acc_norm_stderr": 0.028346963777162466
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.04142439719489362,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.04142439719489362
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.31724137931034485,
"acc_stderr": 0.03878352372138621,
"acc_norm": 0.31724137931034485,
"acc_norm_stderr": 0.03878352372138621
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.0236369759961018,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.0236369759961018
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.03932537680392871,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392871
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.3741935483870968,
"acc_stderr": 0.027528904299845787,
"acc_norm": 0.3741935483870968,
"acc_norm_stderr": 0.027528904299845787
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.2660098522167488,
"acc_stderr": 0.03108982600293752,
"acc_norm": 0.2660098522167488,
"acc_norm_stderr": 0.03108982600293752
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.44242424242424244,
"acc_stderr": 0.03878372113711274,
"acc_norm": 0.44242424242424244,
"acc_norm_stderr": 0.03878372113711274
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.03547601494006936,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.03547601494006936
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.37305699481865284,
"acc_stderr": 0.03490205592048574,
"acc_norm": 0.37305699481865284,
"acc_norm_stderr": 0.03490205592048574
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.26153846153846155,
"acc_stderr": 0.022282141204204426,
"acc_norm": 0.26153846153846155,
"acc_norm_stderr": 0.022282141204204426
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.029472485833136084,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136084
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.3357798165137615,
"acc_stderr": 0.02024808139675293,
"acc_norm": 0.3357798165137615,
"acc_norm_stderr": 0.02024808139675293
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.03167468706828979,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.03167468706828979
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.4068627450980392,
"acc_stderr": 0.03447891136353382,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.03447891136353382
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.45569620253164556,
"acc_stderr": 0.03241920684693334,
"acc_norm": 0.45569620253164556,
"acc_norm_stderr": 0.03241920684693334
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.3542600896860987,
"acc_stderr": 0.03210062154134986,
"acc_norm": 0.3542600896860987,
"acc_norm_stderr": 0.03210062154134986
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.37404580152671757,
"acc_stderr": 0.04243869242230524,
"acc_norm": 0.37404580152671757,
"acc_norm_stderr": 0.04243869242230524
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.5206611570247934,
"acc_stderr": 0.04560456086387235,
"acc_norm": 0.5206611570247934,
"acc_norm_stderr": 0.04560456086387235
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.04750077341199986,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04750077341199986
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.3374233128834356,
"acc_stderr": 0.03714908409935574,
"acc_norm": 0.3374233128834356,
"acc_norm_stderr": 0.03714908409935574
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.23214285714285715,
"acc_stderr": 0.04007341809755806,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755806
},
"harness|hendrycksTest-management|5": {
"acc": 0.39805825242718446,
"acc_stderr": 0.04846748253977239,
"acc_norm": 0.39805825242718446,
"acc_norm_stderr": 0.04846748253977239
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.3717948717948718,
"acc_stderr": 0.031660988918880785,
"acc_norm": 0.3717948717948718,
"acc_norm_stderr": 0.031660988918880785
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.45977011494252873,
"acc_stderr": 0.017821994096933535,
"acc_norm": 0.45977011494252873,
"acc_norm_stderr": 0.017821994096933535
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.30346820809248554,
"acc_stderr": 0.02475241196091722,
"acc_norm": 0.30346820809248554,
"acc_norm_stderr": 0.02475241196091722
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.014333522059217892,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.014333522059217892
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.02768418188330289,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.02768418188330289
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.40836012861736337,
"acc_stderr": 0.02791705074848462,
"acc_norm": 0.40836012861736337,
"acc_norm_stderr": 0.02791705074848462
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.32407407407407407,
"acc_stderr": 0.02604176620271716,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.02604176620271716
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2872340425531915,
"acc_stderr": 0.026992199173064356,
"acc_norm": 0.2872340425531915,
"acc_norm_stderr": 0.026992199173064356
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2757496740547588,
"acc_stderr": 0.011413813609160986,
"acc_norm": 0.2757496740547588,
"acc_norm_stderr": 0.011413813609160986
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.3382352941176471,
"acc_stderr": 0.02873932851398357,
"acc_norm": 0.3382352941176471,
"acc_norm_stderr": 0.02873932851398357
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.33169934640522875,
"acc_stderr": 0.019047485239360378,
"acc_norm": 0.33169934640522875,
"acc_norm_stderr": 0.019047485239360378
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252088,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252088
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.3183673469387755,
"acc_stderr": 0.029822533793982073,
"acc_norm": 0.3183673469387755,
"acc_norm_stderr": 0.029822533793982073
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.3283582089552239,
"acc_stderr": 0.033206858897443244,
"acc_norm": 0.3283582089552239,
"acc_norm_stderr": 0.033206858897443244
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.03529486801511115,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.4093567251461988,
"acc_stderr": 0.03771283107626544,
"acc_norm": 0.4093567251461988,
"acc_norm_stderr": 0.03771283107626544
},
"harness|truthfulqa:mc|0": {
"mc1": 0.26193390452876375,
"mc1_stderr": 0.015392118805015027,
"mc2": 0.41485075799478666,
"mc2_stderr": 0.014670252998442896
},
"harness|winogrande|5": {
"acc": 0.6550907655880032,
"acc_stderr": 0.01335937980503369
},
"harness|gsm8k|5": {
"acc": 0.1607278241091736,
"acc_stderr": 0.010116708586037183
}
}
```
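To pull these aggregated numbers programmatically, a small sketch using the "results" configuration described above; the exact column layout of the results parquet is an assumption.

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; the "latest" split
# always points to the most recent evaluation (see the config list above).
results = load_dataset("open-llm-leaderboard/details_liminerity__ultra0",
                       "results", split="latest")
print(results[0])  # assumption: one row per run with nested metric fields
```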
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_liminerity__ultra0 | [
"region:us"
] | 2024-02-16T21:03:10+00:00 | {"pretty_name": "Evaluation run of liminerity/ultra0", "dataset_summary": "Dataset automatically created during the evaluation run of model [liminerity/ultra0](https://huggingface.co/liminerity/ultra0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_liminerity__ultra0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T21:00:48.786495](https://huggingface.co/datasets/open-llm-leaderboard/details_liminerity__ultra0/blob/main/results_2024-02-16T21-00-48.786495.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3398058115621321,\n \"acc_stderr\": 0.033239808655417924,\n \"acc_norm\": 0.3409105453426057,\n \"acc_norm_stderr\": 0.033965935374477625,\n \"mc1\": 0.26193390452876375,\n \"mc1_stderr\": 0.015392118805015027,\n \"mc2\": 0.41485075799478666,\n \"mc2_stderr\": 0.014670252998442896\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3856655290102389,\n \"acc_stderr\": 0.01422425097325718,\n \"acc_norm\": 0.41467576791808874,\n \"acc_norm_stderr\": 0.014397070564409174\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5078669587731528,\n \"acc_stderr\": 0.004989163747650774,\n \"acc_norm\": 0.6802429794861581,\n \"acc_norm_stderr\": 0.004654291661255925\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4148148148148148,\n \"acc_stderr\": 0.04256193767901407,\n \"acc_norm\": 0.4148148148148148,\n \"acc_norm_stderr\": 0.04256193767901407\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3092105263157895,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.3092105263157895,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3584905660377358,\n \"acc_stderr\": 0.02951470358398176,\n \"acc_norm\": 0.3584905660377358,\n \"acc_norm_stderr\": 0.02951470358398176\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n 
\"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3352601156069364,\n \"acc_stderr\": 0.03599586301247078,\n \"acc_norm\": 0.3352601156069364,\n \"acc_norm_stderr\": 0.03599586301247078\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.10784313725490197,\n \"acc_stderr\": 0.03086428212206013,\n \"acc_norm\": 0.10784313725490197,\n \"acc_norm_stderr\": 0.03086428212206013\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.251063829787234,\n \"acc_stderr\": 0.028346963777162466,\n \"acc_norm\": 0.251063829787234,\n \"acc_norm_stderr\": 0.028346963777162466\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.31724137931034485,\n \"acc_stderr\": 0.03878352372138621,\n \"acc_norm\": 0.31724137931034485,\n \"acc_norm_stderr\": 0.03878352372138621\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.0236369759961018,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.0236369759961018\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.03932537680392871,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.03932537680392871\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3741935483870968,\n \"acc_stderr\": 0.027528904299845787,\n \"acc_norm\": 0.3741935483870968,\n \"acc_norm_stderr\": 0.027528904299845787\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2660098522167488,\n \"acc_stderr\": 0.03108982600293752,\n \"acc_norm\": 0.2660098522167488,\n \"acc_norm_stderr\": 0.03108982600293752\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.44242424242424244,\n \"acc_stderr\": 0.03878372113711274,\n \"acc_norm\": 0.44242424242424244,\n \"acc_norm_stderr\": 0.03878372113711274\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.45454545454545453,\n \"acc_stderr\": 0.03547601494006936,\n \"acc_norm\": 0.45454545454545453,\n \"acc_norm_stderr\": 0.03547601494006936\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.37305699481865284,\n \"acc_stderr\": 0.03490205592048574,\n \"acc_norm\": 0.37305699481865284,\n \"acc_norm_stderr\": 0.03490205592048574\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.26153846153846155,\n 
\"acc_stderr\": 0.022282141204204426,\n \"acc_norm\": 0.26153846153846155,\n \"acc_norm_stderr\": 0.022282141204204426\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.28991596638655465,\n \"acc_stderr\": 0.029472485833136084,\n \"acc_norm\": 0.28991596638655465,\n \"acc_norm_stderr\": 0.029472485833136084\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3357798165137615,\n \"acc_stderr\": 0.02024808139675293,\n \"acc_norm\": 0.3357798165137615,\n \"acc_norm_stderr\": 0.02024808139675293\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.03167468706828979,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.03167468706828979\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4068627450980392,\n \"acc_stderr\": 0.03447891136353382,\n \"acc_norm\": 0.4068627450980392,\n \"acc_norm_stderr\": 0.03447891136353382\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.45569620253164556,\n \"acc_stderr\": 0.03241920684693334,\n \"acc_norm\": 0.45569620253164556,\n \"acc_norm_stderr\": 0.03241920684693334\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3542600896860987,\n \"acc_stderr\": 0.03210062154134986,\n \"acc_norm\": 0.3542600896860987,\n \"acc_norm_stderr\": 0.03210062154134986\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.37404580152671757,\n \"acc_stderr\": 0.04243869242230524,\n \"acc_norm\": 0.37404580152671757,\n \"acc_norm_stderr\": 0.04243869242230524\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5206611570247934,\n \"acc_stderr\": 0.04560456086387235,\n \"acc_norm\": 0.5206611570247934,\n \"acc_norm_stderr\": 0.04560456086387235\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.04750077341199986,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.04750077341199986\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3374233128834356,\n \"acc_stderr\": 0.03714908409935574,\n \"acc_norm\": 0.3374233128834356,\n \"acc_norm_stderr\": 0.03714908409935574\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n \"acc_stderr\": 0.04007341809755806,\n \"acc_norm\": 0.23214285714285715,\n \"acc_norm_stderr\": 0.04007341809755806\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.39805825242718446,\n \"acc_stderr\": 0.04846748253977239,\n \"acc_norm\": 0.39805825242718446,\n \"acc_norm_stderr\": 0.04846748253977239\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.3717948717948718,\n \"acc_stderr\": 0.031660988918880785,\n \"acc_norm\": 0.3717948717948718,\n \"acc_norm_stderr\": 0.031660988918880785\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.45977011494252873,\n \"acc_stderr\": 0.017821994096933535,\n \"acc_norm\": 
0.45977011494252873,\n \"acc_norm_stderr\": 0.017821994096933535\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.30346820809248554,\n \"acc_stderr\": 0.02475241196091722,\n \"acc_norm\": 0.30346820809248554,\n \"acc_norm_stderr\": 0.02475241196091722\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217892,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217892\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.02768418188330289,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.02768418188330289\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.40836012861736337,\n \"acc_stderr\": 0.02791705074848462,\n \"acc_norm\": 0.40836012861736337,\n \"acc_norm_stderr\": 0.02791705074848462\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.32407407407407407,\n \"acc_stderr\": 0.02604176620271716,\n \"acc_norm\": 0.32407407407407407,\n \"acc_norm_stderr\": 0.02604176620271716\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2872340425531915,\n \"acc_stderr\": 0.026992199173064356,\n \"acc_norm\": 0.2872340425531915,\n \"acc_norm_stderr\": 0.026992199173064356\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2757496740547588,\n \"acc_stderr\": 0.011413813609160986,\n \"acc_norm\": 0.2757496740547588,\n \"acc_norm_stderr\": 0.011413813609160986\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3382352941176471,\n \"acc_stderr\": 0.02873932851398357,\n \"acc_norm\": 0.3382352941176471,\n \"acc_norm_stderr\": 0.02873932851398357\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.33169934640522875,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.33169934640522875,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.32727272727272727,\n \"acc_stderr\": 0.04494290866252088,\n \"acc_norm\": 0.32727272727272727,\n \"acc_norm_stderr\": 0.04494290866252088\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3183673469387755,\n \"acc_stderr\": 0.029822533793982073,\n \"acc_norm\": 0.3183673469387755,\n \"acc_norm_stderr\": 0.029822533793982073\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3283582089552239,\n \"acc_stderr\": 0.033206858897443244,\n \"acc_norm\": 0.3283582089552239,\n \"acc_norm_stderr\": 0.033206858897443244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n \"acc_stderr\": 0.03529486801511115,\n \"acc_norm\": 0.2891566265060241,\n \"acc_norm_stderr\": 0.03529486801511115\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.4093567251461988,\n \"acc_stderr\": 0.03771283107626544,\n \"acc_norm\": 0.4093567251461988,\n \"acc_norm_stderr\": 0.03771283107626544\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26193390452876375,\n \"mc1_stderr\": 0.015392118805015027,\n \"mc2\": 0.41485075799478666,\n \"mc2_stderr\": 0.014670252998442896\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6550907655880032,\n \"acc_stderr\": 0.01335937980503369\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1607278241091736,\n \"acc_stderr\": 0.010116708586037183\n }\n}\n```", "repo_url": 
"https://huggingface.co/liminerity/ultra0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|arc:challenge|25_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|gsm8k|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hellaswag|10_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-00-48.786495.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-00-48.786495.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-00-48.786495.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T21-00-48.786495.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-00-48.786495.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-00-48.786495.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["**/details_harness|winogrande|5_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T21-00-48.786495.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T21_00_48.786495", "path": ["results_2024-02-16T21-00-48.786495.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T21-00-48.786495.parquet"]}]}]} | 2024-02-16T21:03:33+00:00 |
8f08f02cd47ef351bb11f190a3d9d56f2dcc9e9d | alisson40889/cic | [
"license:openrail",
"region:us"
] | 2024-02-16T21:19:58+00:00 | {"license": "openrail"} | 2024-02-16T21:21:01+00:00 |
|
79a6996a6433d614c234b3d374d574cb91ffd0f4 | <!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Data Card</title>
<link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@400;500;600&display=swap" rel="stylesheet">
<style>
body {
font-family: 'Quicksand', sans-serif;
background-color: #1A202C;
color: #D8DEE9;
margin: 0;
padding: 0; /* Remove default padding */
font-size: 16px;
background: linear-gradient(135deg, #2E3440 0%, #1A202C 100%);
}
p {
            padding-left: 10px;
}
.container {
width: 100%;
margin: auto;
background-color: rgb(255 255 255 / 1%);
padding: 20px 30px 40px; /* Add padding below the image only */
padding-right: 32px;
border-radius: 12px;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.2);
backdrop-filter: blur(10px);
border: 1px solid rgba(255, 255, 255, 0.05);
}
.header {
display: flex;
align-items: center;
justify-content: space-between;
gap: 20px;
}
img {
border-radius: 10px 10px 0 0!important;
padding-left: 0px !important;
}
.header h1 {
font-size: 28px;
color: #ECEFF4;
margin: 0;
text-shadow: 2px 2px 4px rgba(0, 0, 0, 0.3);
}
.info {
background-color: rgba(255, 255, 255, 0.05);
color: #AEBAC7;
border-radius: 12px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
font-size: 14px;
line-height: 1.6;
margin-left: 5px;
overflow-x: auto;
margin-top: 20px; /* Adjusted margin */
border: 1px solid rgba(255, 255, 255, 0.05);
transition: background-color 0.6s ease; /* Smooth transition over 0.5 seconds */
}
.info img {
width: 100%;
border-radius: 10px 10px 0 0;
margin-top: -20px; /* Negative margin to overlap container margin */
}
a {
color: #88C0D0;
text-decoration: none;
transition: color 0.3s ease;
position: relative;
}
a:hover {
color: #A3BE8C;
text-decoration: none;
}
a::before {
content: '';
position: absolute;
width: 100%;
height: 2px;
bottom: 0;
left: 0;
background-color: #A3BE8C;
visibility: hidden;
transform: scaleX(0);
transition: all 0.3s ease-in-out;
}
a:hover::before {
visibility: visible;
transform: scaleX(1);
}
.button {
display: inline-block;
background-color: #5E81AC;
color: #E5E9F0;
padding: 10px 20px;
border-radius: 5px;
cursor: pointer;
text-decoration: none;
transition: background-color 0.3s ease;
}
.button:hover {
background-color: #81A1C1;
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>Aether Dataset</h1>
</div>
<div class="info">
<img src="https://cdn-uploads.huggingface.co/production/uploads/64545af5ec40bbbd01242ca6/N4UWofDAapZ_kCraMuQDJ.webp" style="border-radius: 10px;">
<p><strong>Creator:</strong> <a href="https://huggingface.co/Steelskull" target="_blank">SteelSkull</a></p>
<p><strong>Community Organization:</strong> <a href="https://huggingface.co/ConvexAI" target="_blank">ConvexAI</a></p>
<p><strong>Discord:</strong> <a href="https://discord.gg/yYqmNmg7Wj" target="_blank">Join us on Discord</a></p>
<div>
<div>
                <p><strong>About Aether:</strong> The Aether dataset is a merged conversation dataset built from the source datasets listed below.</p>
                <p>v1.5 uses a rebuilt processing script and a new dataset mix: between v1.2.2 and v1.5 the source datasets changed and two were added.</p>
                <p>v1.5 also reworks the human -> gpt conversations and adds system and tool columns.</p>
<p><strong>Source Datasets:</strong></p>
<ul>
<li>grimulkan/bluemoon_Karen_cleaned</li>
<li>Doctor-Shotgun/no-robots-sharegpt</li>
<li>Locutusque/hercules-v2.5</li>
<li>jondurbin/airoboros-3.2</li>
<li>openerotica/freedom-rp</li>
<li>teknium/OpenHermes-2.5</li>
<li>Doctor-Shotgun/capybara-sharegpt</li>
<li>KaraKaraWitch/PIPPA-ShareGPT-formatted</li>
<li>Locutusque/bagel-clean-v0.3-shuffled</li>
</ul>
<p><strong>Phrases and Data Removed:</strong></p>
                <p>To enhance the dataset's coherence and relevance across varied contexts, certain phrases have been selectively omitted. Each source dataset is run against a "keyed" list of phrases; a sketch of this filter follows the list below.</p>
                <p><strong>Filtering stats:</strong></p>
                <p>Total objects removed: 72,114</p>
                <p><strong>Deduplication:</strong></p>
                <p>Initial row count: 3,296,307</p>
                <p>Final row count: 2,728,791</p>
                <p>Rows removed: 567,516</p>
                <p><strong>Filter phrases:</strong></p>
<ul>
<li>Couldn't help but</li>
<li>Can't resist</li>
<li>I'm sorry, but</li>
<li>As an AI</li>
<li>However, it is important to</li>
<li>Cannot provide</li>
<li>And others</li>
</ul>
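                <p>For illustration, a minimal, hypothetical Python sketch of the keyed-phrase filter described above (the phrase list shown is an excerpt and the field names are assumptions based on this card's schema, not the exact script used to build Aether):</p>
                <pre><code>
# Keep only conversations that contain none of the "keyed" phrases.
KEYED_PHRASES = [
    "Couldn't help but",
    "Can't resist",
    "I'm sorry, but",
    "As an AI",
]

def keep(example):
    # Each example carries a "conversations" list of {"from", "value"} turns.
    text = " ".join(turn["value"] for turn in example["conversations"])
    return not any(phrase in text for phrase in KEYED_PHRASES)

# filtered = raw_dataset.filter(keep)  # e.g. with datasets.Dataset.filter
                </code></pre>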
</div>
</div>
        </div>
    </div>
</body>
</html> | Steelskull/Aether-v1.5 | [
"size_categories:1M<n<10M",
"language:en",
"license:apache-2.0",
"not-for-all-audiences",
"region:us"
] | 2024-02-16T21:22:50+00:00 | {"language": ["en"], "license": "apache-2.0", "size_categories": ["1M<n<10M"], "dataset_info": {"features": [{"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}, {"name": "system", "dtype": "string"}, {"name": "tools", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4655376981, "num_examples": 2712289}], "download_size": 2446047146, "dataset_size": 4655376981}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["not-for-all-audiences"]} | 2024-02-17T16:42:58+00:00 |
6464a0355369d77b73cccfeae91fd4c670dfa081 | TeraflopAI/Guam-Law | [
"region:us"
] | 2024-02-16T21:26:39+00:00 | {} | 2024-02-16T21:26:48+00:00 |
|
c991795be509290ddc42dcdb71a59d5f09dd9d4d |
# Dataset Card for Evaluation run of Kquant03/Samlagast-7B-laser-bf16
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Kquant03/Samlagast-7B-laser-bf16](https://huggingface.co/Kquant03/Samlagast-7B-laser-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16",
"harness_winogrande_5",
split="train")
```
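
If you want the aggregated metrics rather than per-task details, the run also exposes a "results" configuration whose "latest" split points at the most recent evaluation. A minimal sketch (the configuration and split names are taken from this card's configuration list; the exact row layout is an assumption):

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16",
    "results",
    split="latest",
)
print(results[0])  # a single row holding the aggregated scores
```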
## Latest results
These are the [latest results from run 2024-02-16T21:27:55.439399](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16/blob/main/results_2024-02-16T21-27-55.439399.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.649387957418536,
"acc_stderr": 0.03220384800343332,
"acc_norm": 0.6490741317673534,
"acc_norm_stderr": 0.03287747775346528,
"mc1": 0.594859241126071,
"mc1_stderr": 0.01718561172775337,
"mc2": 0.7316299369099223,
"mc2_stderr": 0.01462879068661053
},
"harness|arc:challenge|25": {
"acc": 0.7081911262798635,
"acc_stderr": 0.01328452529240351,
"acc_norm": 0.7286689419795221,
"acc_norm_stderr": 0.012993807727545796
},
"harness|hellaswag|10": {
"acc": 0.7167894841665007,
"acc_stderr": 0.004496369742132102,
"acc_norm": 0.8895638319059949,
"acc_norm_stderr": 0.003127920738394109
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.040723148118768364,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.040723148118768364
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6710526315789473,
"acc_stderr": 0.03823428969926605,
"acc_norm": 0.6710526315789473,
"acc_norm_stderr": 0.03823428969926605
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7018867924528301,
"acc_stderr": 0.02815283794249386,
"acc_norm": 0.7018867924528301,
"acc_norm_stderr": 0.02815283794249386
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7847222222222222,
"acc_stderr": 0.03437079344106135,
"acc_norm": 0.7847222222222222,
"acc_norm_stderr": 0.03437079344106135
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.653179190751445,
"acc_stderr": 0.036291466701596636,
"acc_norm": 0.653179190751445,
"acc_norm_stderr": 0.036291466701596636
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107223,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107223
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5574468085106383,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.5574468085106383,
"acc_norm_stderr": 0.03246956919789958
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.047028804320496165,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.047028804320496165
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5655172413793104,
"acc_stderr": 0.04130740879555498,
"acc_norm": 0.5655172413793104,
"acc_norm_stderr": 0.04130740879555498
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41534391534391535,
"acc_stderr": 0.025379524910778398,
"acc_norm": 0.41534391534391535,
"acc_norm_stderr": 0.025379524910778398
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7838709677419354,
"acc_stderr": 0.02341529343356853,
"acc_norm": 0.7838709677419354,
"acc_norm_stderr": 0.02341529343356853
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5073891625615764,
"acc_stderr": 0.035176035403610105,
"acc_norm": 0.5073891625615764,
"acc_norm_stderr": 0.035176035403610105
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.032876667586034906,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.032876667586034906
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.803030303030303,
"acc_stderr": 0.028335609732463362,
"acc_norm": 0.803030303030303,
"acc_norm_stderr": 0.028335609732463362
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919443,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919443
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.023901157979402534,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.023901157979402534
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.028037929969114993,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.028037929969114993
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.03038835355188679,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.03038835355188679
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3841059602649007,
"acc_stderr": 0.03971301814719197,
"acc_norm": 0.3841059602649007,
"acc_norm_stderr": 0.03971301814719197
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8403669724770643,
"acc_stderr": 0.015703498348461763,
"acc_norm": 0.8403669724770643,
"acc_norm_stderr": 0.015703498348461763
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5324074074074074,
"acc_stderr": 0.03402801581358966,
"acc_norm": 0.5324074074074074,
"acc_norm_stderr": 0.03402801581358966
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.02584501798692692,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.02584501798692692
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7848101265822784,
"acc_stderr": 0.02675082699467618,
"acc_norm": 0.7848101265822784,
"acc_norm_stderr": 0.02675082699467618
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7862595419847328,
"acc_stderr": 0.0359546161177469,
"acc_norm": 0.7862595419847328,
"acc_norm_stderr": 0.0359546161177469
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.75,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04186091791394607
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7484662576687117,
"acc_stderr": 0.03408997886857529,
"acc_norm": 0.7484662576687117,
"acc_norm_stderr": 0.03408997886857529
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.04684099321077106,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.04684099321077106
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8717948717948718,
"acc_stderr": 0.021901905115073325,
"acc_norm": 0.8717948717948718,
"acc_norm_stderr": 0.021901905115073325
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8288633461047255,
"acc_stderr": 0.013468201614066306,
"acc_norm": 0.8288633461047255,
"acc_norm_stderr": 0.013468201614066306
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7312138728323699,
"acc_stderr": 0.023868003262500104,
"acc_norm": 0.7312138728323699,
"acc_norm_stderr": 0.023868003262500104
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4424581005586592,
"acc_stderr": 0.01661139368726858,
"acc_norm": 0.4424581005586592,
"acc_norm_stderr": 0.01661139368726858
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7189542483660131,
"acc_stderr": 0.025738854797818737,
"acc_norm": 0.7189542483660131,
"acc_norm_stderr": 0.025738854797818737
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.684887459807074,
"acc_stderr": 0.026385273703464492,
"acc_norm": 0.684887459807074,
"acc_norm_stderr": 0.026385273703464492
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7530864197530864,
"acc_stderr": 0.02399350170904211,
"acc_norm": 0.7530864197530864,
"acc_norm_stderr": 0.02399350170904211
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4929078014184397,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.4929078014184397,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.47327249022164275,
"acc_stderr": 0.01275197796767601,
"acc_norm": 0.47327249022164275,
"acc_norm_stderr": 0.01275197796767601
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6617647058823529,
"acc_stderr": 0.028739328513983572,
"acc_norm": 0.6617647058823529,
"acc_norm_stderr": 0.028739328513983572
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.018926082916083383,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.018926082916083383
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.0289205832206756,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.0289205832206756
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8258706467661692,
"acc_stderr": 0.026814951200421603,
"acc_norm": 0.8258706467661692,
"acc_norm_stderr": 0.026814951200421603
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5662650602409639,
"acc_stderr": 0.03858158940685516,
"acc_norm": 0.5662650602409639,
"acc_norm_stderr": 0.03858158940685516
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160893,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.027966785859160893
},
"harness|truthfulqa:mc|0": {
"mc1": 0.594859241126071,
"mc1_stderr": 0.01718561172775337,
"mc2": 0.7316299369099223,
"mc2_stderr": 0.01462879068661053
},
"harness|winogrande|5": {
"acc": 0.8539857932123125,
"acc_stderr": 0.009924440374585244
},
"harness|gsm8k|5": {
"acc": 0.6565579984836998,
"acc_stderr": 0.013079933811800308
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16 | [
"region:us"
] | 2024-02-16T21:30:17+00:00 | {"pretty_name": "Evaluation run of Kquant03/Samlagast-7B-laser-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kquant03/Samlagast-7B-laser-bf16](https://huggingface.co/Kquant03/Samlagast-7B-laser-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T21:27:55.439399](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Samlagast-7B-laser-bf16/blob/main/results_2024-02-16T21-27-55.439399.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.649387957418536,\n \"acc_stderr\": 0.03220384800343332,\n \"acc_norm\": 0.6490741317673534,\n \"acc_norm_stderr\": 0.03287747775346528,\n \"mc1\": 0.594859241126071,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.7316299369099223,\n \"mc2_stderr\": 0.01462879068661053\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7081911262798635,\n \"acc_stderr\": 0.01328452529240351,\n \"acc_norm\": 0.7286689419795221,\n \"acc_norm_stderr\": 0.012993807727545796\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7167894841665007,\n \"acc_stderr\": 0.004496369742132102,\n \"acc_norm\": 0.8895638319059949,\n \"acc_norm_stderr\": 0.003127920738394109\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.040723148118768364,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.040723148118768364\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926605,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926605\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249386,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249386\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778398,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778398\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114993,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114993\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461763,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461763\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.02675082699467618,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.02675082699467618\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8288633461047255,\n \"acc_stderr\": 
0.013468201614066306,\n \"acc_norm\": 0.8288633461047255,\n \"acc_norm_stderr\": 0.013468201614066306\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4424581005586592,\n \"acc_stderr\": 0.01661139368726858,\n \"acc_norm\": 0.4424581005586592,\n \"acc_norm_stderr\": 0.01661139368726858\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n \"acc_stderr\": 0.026385273703464492,\n \"acc_norm\": 0.684887459807074,\n \"acc_norm_stderr\": 0.026385273703464492\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.01275197796767601,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.01275197796767601\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.0289205832206756,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.0289205832206756\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.594859241126071,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.7316299369099223,\n \"mc2_stderr\": 0.01462879068661053\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8539857932123125,\n \"acc_stderr\": 0.009924440374585244\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6565579984836998,\n \"acc_stderr\": 0.013079933811800308\n }\n}\n```", "repo_url": 
"https://huggingface.co/Kquant03/Samlagast-7B-laser-bf16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|arc:challenge|25_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|gsm8k|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hellaswag|10_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-27-55.439399.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-27-55.439399.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-27-55.439399.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T21-27-55.439399.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-27-55.439399.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T21-27-55.439399.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["**/details_harness|winogrande|5_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T21-27-55.439399.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T21_27_55.439399", "path": ["results_2024-02-16T21-27-55.439399.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T21-27-55.439399.parquet"]}]}]} | 2024-02-16T21:30:39+00:00 |
02074b8c1fb607db460c6a7150967d6172d073f2 | # Dataset Card for "el_talar_febrero24"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | ganader-ai-developers/el_talar_febrero24 | [
"region:us"
] | 2024-02-16T21:30:31+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "cow_id", "dtype": "int64"}, {"name": "weight", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "breed", "dtype": "string"}, {"name": "sex", "dtype": "string"}, {"name": "orientation", "dtype": "string"}, {"name": "internal_cow_id", "dtype": "string"}, {"name": "vertical_distance_meters", "dtype": "float64"}, {"name": "horizontal_distance_meters", "dtype": "float64"}, {"name": "picture_quality", "dtype": "string"}, {"name": "id", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 12063030422.09, "num_examples": 2999}], "download_size": 11177066135, "dataset_size": 12063030422.09}} | 2024-02-16T22:10:24+00:00 |
84bd2706f53f4480bdebf851cfe654fe010370b5 | # Dataset Card for "legal_chat"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | worldboss/legal_chat | [
"region:us"
] | 2024-02-16T21:31:14+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 7228218, "num_examples": 4394}], "download_size": 2605909, "dataset_size": 7228218}} | 2024-02-16T21:31:16+00:00 |
4bdd96f433a22309d59d0dab4fa2ac813cc8919d | # Dataset Card for "nia_faq_chat"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | worldboss/nia_faq_chat | [
"region:us"
] | 2024-02-16T21:32:41+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 36318, "num_examples": 66}], "download_size": 20689, "dataset_size": 36318}} | 2024-02-16T21:32:42+00:00 |
cce54d5379de2928bea79f54eeb2a682fa9f3006 | # Dataset Card for "qa_nia_faq_chat"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | worldboss/qa_nia_faq_chat | [
"region:us"
] | 2024-02-16T21:33:45+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 36450, "num_examples": 66}], "download_size": 20652, "dataset_size": 36450}} | 2024-02-16T21:49:29+00:00 |
e971a9c9a841b769a14f94c87caa8137c57fc335 | mcarthuradal/malawi | [
"region:us"
] | 2024-02-16T21:48:41+00:00 | {"dataset_info": [{"config_name": "booklets", "features": [{"name": "content", "dtype": "string"}, {"name": "index", "dtype": "int64"}], "splits": [{"name": "bk0", "num_bytes": 153783, "num_examples": 695}, {"name": "bk1", "num_bytes": 198433, "num_examples": 939}, {"name": "bk2", "num_bytes": 296363, "num_examples": 2064}, {"name": "bk3", "num_bytes": 173647, "num_examples": 740}, {"name": "bk4", "num_bytes": 45672, "num_examples": 271}, {"name": "bk5", "num_bytes": 453444, "num_examples": 515}], "download_size": 1561166, "dataset_size": 1774786}, {"config_name": "default", "features": [{"name": "Question Text", "dtype": "string"}, {"name": "Question Answer", "dtype": "string"}, {"name": "Reference Document", "dtype": "string"}, {"name": "Paragraph(s) Number", "dtype": "string"}, {"name": "Keywords", "dtype": "string"}, {"name": "ID", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 345558, "num_examples": 748}, {"name": "test", "num_bytes": 71682, "num_examples": 499}], "download_size": 200266, "dataset_size": 417240}], "configs": [{"config_name": "booklets", "data_files": [{"split": "bk0", "path": "booklets/bk0-*"}, {"split": "bk1", "path": "booklets/bk1-*"}, {"split": "bk2", "path": "booklets/bk2-*"}, {"split": "bk3", "path": "booklets/bk3-*"}, {"split": "bk4", "path": "booklets/bk4-*"}, {"split": "bk5", "path": "booklets/bk5-*"}]}, {"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-16T23:32:52+00:00 |
|
6df8e9756dbb24e9ff1f02b67f205ae75b344f81 | WeixiangYan/hu | [
"region:us"
] | 2024-02-16T21:57:28+00:00 | {} | 2024-02-16T21:57:28+00:00 |
|
6f9bbcdea17ad787b0c533e314cab48198dcf909 | ktrinh38/bandier | [
"region:us"
] | 2024-02-16T22:02:02+00:00 | {"dataset_info": {"features": [{"name": "folder", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 572697611.476, "num_examples": 1604}], "download_size": 564763352, "dataset_size": 572697611.476}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T22:02:48+00:00 |
|
bd2a467a67e29006ff626c249696bf5a0d2ae84f | rocioadlc/data3 | [
"license:apache-2.0",
"region:us"
] | 2024-02-16T22:03:11+00:00 | {"license": "apache-2.0"} | 2024-02-16T23:27:38+00:00 |
|
ff1f2be954d5968402ef7ed6929c71f073a7e58a |
# Dataset Card for Evaluation run of eren23/OGNO-7b-dpo-truthful
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [eren23/OGNO-7b-dpo-truthful](https://huggingface.co/eren23/OGNO-7b-dpo-truthful) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful",
"harness_winogrande_5",
split="train")
```
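The aggregated metrics described above can be loaded the same way through the "results" configuration. A minimal sketch, assuming this dataset follows the same configuration and split layout as the other leaderboard detail datasets (where the "latest" split points to the most recent run):
```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run;
# "results" is the aggregate configuration and "latest" tracks the newest run.
results = load_dataset("open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful",
	"results",
	split="latest")
```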
## Latest results
These are the [latest results from run 2024-02-16T22:05:00.872209](https://huggingface.co/datasets/open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful/blob/main/results_2024-02-16T22-05-00.872209.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.652348491009575,
"acc_stderr": 0.03197996047742033,
"acc_norm": 0.6516420684516023,
"acc_norm_stderr": 0.03264932973384798,
"mc1": 0.6242350061199511,
"mc1_stderr": 0.01695458406021429,
"mc2": 0.7660822976380632,
"mc2_stderr": 0.013995111777693896
},
"harness|arc:challenge|25": {
"acc": 0.7141638225255973,
"acc_stderr": 0.013203196088537372,
"acc_norm": 0.7295221843003413,
"acc_norm_stderr": 0.012980954547659556
},
"harness|hellaswag|10": {
"acc": 0.7150965943039235,
"acc_stderr": 0.004504459553909766,
"acc_norm": 0.890161322445728,
"acc_norm_stderr": 0.0031204952388275576
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6370370370370371,
"acc_stderr": 0.04153948404742398,
"acc_norm": 0.6370370370370371,
"acc_norm_stderr": 0.04153948404742398
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6981132075471698,
"acc_stderr": 0.02825420034443866,
"acc_norm": 0.6981132075471698,
"acc_norm_stderr": 0.02825420034443866
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7638888888888888,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.7638888888888888,
"acc_norm_stderr": 0.03551446610810826
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6589595375722543,
"acc_stderr": 0.03614665424180826,
"acc_norm": 0.6589595375722543,
"acc_norm_stderr": 0.03614665424180826
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.048971049527263666
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5787234042553191,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.5787234042553191,
"acc_norm_stderr": 0.03227834510146268
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.046970851366478626
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.02533120243894443,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.02533120243894443
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.49206349206349204,
"acc_stderr": 0.044715725362943486,
"acc_norm": 0.49206349206349204,
"acc_norm_stderr": 0.044715725362943486
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7870967741935484,
"acc_stderr": 0.02328766512726854,
"acc_norm": 0.7870967741935484,
"acc_norm_stderr": 0.02328766512726854
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.0328766675860349,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.0328766675860349
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.028057791672989017,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.028057791672989017
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.917098445595855,
"acc_stderr": 0.01989934131572178,
"acc_norm": 0.917098445595855,
"acc_norm_stderr": 0.01989934131572178
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6615384615384615,
"acc_stderr": 0.023991500500313036,
"acc_norm": 0.6615384615384615,
"acc_norm_stderr": 0.023991500500313036
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.31851851851851853,
"acc_stderr": 0.02840653309060846,
"acc_norm": 0.31851851851851853,
"acc_norm_stderr": 0.02840653309060846
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6722689075630253,
"acc_stderr": 0.03048991141767323,
"acc_norm": 0.6722689075630253,
"acc_norm_stderr": 0.03048991141767323
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3708609271523179,
"acc_stderr": 0.03943966699183629,
"acc_norm": 0.3708609271523179,
"acc_norm_stderr": 0.03943966699183629
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8458715596330275,
"acc_stderr": 0.015480826865374303,
"acc_norm": 0.8458715596330275,
"acc_norm_stderr": 0.015480826865374303
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5046296296296297,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.5046296296296297,
"acc_norm_stderr": 0.03409825519163572
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8480392156862745,
"acc_stderr": 0.025195658428931792,
"acc_norm": 0.8480392156862745,
"acc_norm_stderr": 0.025195658428931792
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8059071729957806,
"acc_stderr": 0.025744902532290916,
"acc_norm": 0.8059071729957806,
"acc_norm_stderr": 0.025744902532290916
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6860986547085202,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.6860986547085202,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8091603053435115,
"acc_stderr": 0.03446513350752598,
"acc_norm": 0.8091603053435115,
"acc_norm_stderr": 0.03446513350752598
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7852760736196319,
"acc_stderr": 0.032262193772867744,
"acc_norm": 0.7852760736196319,
"acc_norm_stderr": 0.032262193772867744
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.04684099321077106,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.04684099321077106
},
"harness|hendrycksTest-management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.04185832598928315
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.021262719400406964,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.021262719400406964
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8263090676883781,
"acc_stderr": 0.01354741565866226,
"acc_norm": 0.8263090676883781,
"acc_norm_stderr": 0.01354741565866226
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7369942196531792,
"acc_stderr": 0.023703099525258172,
"acc_norm": 0.7369942196531792,
"acc_norm_stderr": 0.023703099525258172
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.44581005586592176,
"acc_stderr": 0.016623998513333106,
"acc_norm": 0.44581005586592176,
"acc_norm_stderr": 0.016623998513333106
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.025646863097137897,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.025646863097137897
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.02558306248998481,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.02558306248998481
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7376543209876543,
"acc_stderr": 0.024477222856135114,
"acc_norm": 0.7376543209876543,
"acc_norm_stderr": 0.024477222856135114
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.02982074719142248,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.02982074719142248
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46936114732724904,
"acc_stderr": 0.012746237711716634,
"acc_norm": 0.46936114732724904,
"acc_norm_stderr": 0.012746237711716634
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.02824568739146292,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.02824568739146292
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6683006535947712,
"acc_stderr": 0.019047485239360378,
"acc_norm": 0.6683006535947712,
"acc_norm_stderr": 0.019047485239360378
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7346938775510204,
"acc_stderr": 0.028263889943784593,
"acc_norm": 0.7346938775510204,
"acc_norm_stderr": 0.028263889943784593
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8407960199004975,
"acc_stderr": 0.025870646766169136,
"acc_norm": 0.8407960199004975,
"acc_norm_stderr": 0.025870646766169136
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699121,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699121
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160893,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.027966785859160893
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6242350061199511,
"mc1_stderr": 0.01695458406021429,
"mc2": 0.7660822976380632,
"mc2_stderr": 0.013995111777693896
},
"harness|winogrande|5": {
"acc": 0.8468823993685872,
"acc_stderr": 0.010120623252272955
},
"harness|gsm8k|5": {
"acc": 0.6899166034874905,
"acc_stderr": 0.012740305717376268
}
}
```
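To retrieve these aggregated numbers programmatically rather than copying them from the JSON above, you can load the `results` configuration. The snippet below is a minimal sketch; it assumes the `latest` split naming used throughout this dataset, and inspecting the first row is only one way to read the loaded table:

```python
from datasets import load_dataset

# The "results" config stores the aggregated metrics of each run;
# the "latest" split always points to the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the latest run
```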
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful | [
"region:us"
] | 2024-02-16T22:07:21+00:00 | {"pretty_name": "Evaluation run of eren23/OGNO-7b-dpo-truthful", "dataset_summary": "Dataset automatically created during the evaluation run of model [eren23/OGNO-7b-dpo-truthful](https://huggingface.co/eren23/OGNO-7b-dpo-truthful) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T22:05:00.872209](https://huggingface.co/datasets/open-llm-leaderboard/details_eren23__OGNO-7b-dpo-truthful/blob/main/results_2024-02-16T22-05-00.872209.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.652348491009575,\n \"acc_stderr\": 0.03197996047742033,\n \"acc_norm\": 0.6516420684516023,\n \"acc_norm_stderr\": 0.03264932973384798,\n \"mc1\": 0.6242350061199511,\n \"mc1_stderr\": 0.01695458406021429,\n \"mc2\": 0.7660822976380632,\n \"mc2_stderr\": 0.013995111777693896\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7141638225255973,\n \"acc_stderr\": 0.013203196088537372,\n \"acc_norm\": 0.7295221843003413,\n \"acc_norm_stderr\": 0.012980954547659556\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7150965943039235,\n \"acc_stderr\": 0.004504459553909766,\n \"acc_norm\": 0.890161322445728,\n \"acc_norm_stderr\": 0.0031204952388275576\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 
0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.03614665424180826,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.03614665424180826\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894443,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894443\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.02328766512726854,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.02328766512726854\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.028057791672989017,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.028057791672989017\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.917098445595855,\n \"acc_stderr\": 0.01989934131572178,\n \"acc_norm\": 0.917098445595855,\n \"acc_norm_stderr\": 0.01989934131572178\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374303,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752598,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752598\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8263090676883781,\n \"acc_stderr\": 0.01354741565866226,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258172,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258172\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44581005586592176,\n \"acc_stderr\": 0.016623998513333106,\n \"acc_norm\": 0.44581005586592176,\n \"acc_norm_stderr\": 0.016623998513333106\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135114,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135114\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146292,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6242350061199511,\n \"mc1_stderr\": 0.01695458406021429,\n \"mc2\": 0.7660822976380632,\n \"mc2_stderr\": 0.013995111777693896\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8468823993685872,\n \"acc_stderr\": 0.010120623252272955\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6899166034874905,\n \"acc_stderr\": 
0.012740305717376268\n }\n}\n```", "repo_url": "https://huggingface.co/eren23/OGNO-7b-dpo-truthful", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|arc:challenge|25_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|gsm8k|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hellaswag|10_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-05-00.872209.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-05-00.872209.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-05-00.872209.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T22-05-00.872209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-05-00.872209.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T22_05_00.872209", "path": ["**/details_harness|winogrande|5_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T22-05-00.872209.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_16T22_05_00.872209", "path": ["results_2024-02-16T22-05-00.872209.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T22-05-00.872209.parquet"]}]}]} | 2024-02-16T22:07:44+00:00 |
59803e166472108b9cf7bb9b238a1fd20bd9038d | Willinton/Code_Llms_HiddenStates | [
"license:mit",
"region:us"
] | 2024-02-16T22:07:52+00:00 | {"license": "mit"} | 2024-02-16T22:07:52+00:00 |
|
c0d323fec188584af018f44600724873ab093316 | HelloKattyz/NveeBYHKattyz | [
"license:openrail",
"region:us"
] | 2024-02-16T22:08:44+00:00 | {"license": "openrail"} | 2024-02-17T00:03:17+00:00 |
|
0fcd5e98147b68f046e945e45cbb7f692cbe45e7 | maghwa/OpenHermes-2-AR-10K-27-690k-700k | [
"region:us"
] | 2024-02-16T22:09:22+00:00 | {"dataset_info": {"features": [{"name": "language", "dtype": "null"}, {"name": "system_prompt", "dtype": "null"}, {"name": "conversations", "dtype": "string"}, {"name": "category", "dtype": "null"}, {"name": "id", "dtype": "null"}, {"name": "topic", "dtype": "null"}, {"name": "hash", "dtype": "null"}, {"name": "model_name", "dtype": "null"}, {"name": "idx", "dtype": "null"}, {"name": "skip_prompt_formatting", "dtype": "null"}, {"name": "model", "dtype": "null"}, {"name": "avatarUrl", "dtype": "null"}, {"name": "title", "dtype": "null"}, {"name": "views", "dtype": "float64"}, {"name": "source", "dtype": "string"}, {"name": "custom_instruction", "dtype": "null"}], "splits": [{"name": "train", "num_bytes": 25177131, "num_examples": 10001}], "download_size": 11407724, "dataset_size": 25177131}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T22:09:24+00:00 |
|
4e0b567792cf98a977356eb25eeedb92f29ddf5f | HelloKattyz/SriyaBYHKattyz | [
"license:openrail",
"region:us"
] | 2024-02-16T22:11:07+00:00 | {"license": "openrail"} | 2024-02-16T23:52:18+00:00 |
|
c76b7425f5cd9c7e0e2696b1fcd61e04c6a17263 | vishruthnath/Calc-mawps-Tagged | [
"region:us"
] | 2024-02-16T22:23:33+00:00 | {"dataset_info": {"features": [{"name": "chain", "dtype": "string"}, {"name": "equation", "dtype": "string"}, {"name": "expression", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "num_unique_ops", "dtype": "int64"}, {"name": "operand", "sequence": "float64"}, {"name": "operand_tags", "sequence": "int64"}, {"name": "operation", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_split", "sequence": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "valid", "dtype": "bool"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 680417, "num_examples": 1039}, {"name": "validation", "num_bytes": 656088, "num_examples": 1010}, {"name": "test", "num_bytes": 330740, "num_examples": 505}], "download_size": 463338, "dataset_size": 1667245}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:19:52+00:00 |
|
26a61a9b334af98e60f1ce59edc9e6a88b12d8e2 | kheopss/dpo_dataset_v1.0 | [
"region:us"
] | 2024-02-16T22:28:33+00:00 | {"dataset_info": {"features": [{"name": "rejected", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4072494, "num_examples": 1000}], "download_size": 1588077, "dataset_size": 4072494}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T22:28:34+00:00 |
|
a6e61cdc7f550396aea15c2c8e429a00757c048f | # Dataset Card for "java_renaming_patch"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | thiomajid/java_renaming_patch | [
"region:us"
] | 2024-02-16T22:29:03+00:00 | {"dataset_info": {"features": [{"name": "commit_sha", "dtype": "string"}, {"name": "new_methods", "list": [{"name": "arguments", "sequence": "string"}, {"name": "filename", "dtype": "string"}, {"name": "implementation", "dtype": "string"}, {"name": "signature", "dtype": "string"}]}, {"name": "old_methods", "list": [{"name": "arguments", "sequence": "string"}, {"name": "filename", "dtype": "string"}, {"name": "implementation", "dtype": "string"}, {"name": "signature", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 794271, "num_examples": 74}], "download_size": 271079, "dataset_size": 794271}} | 2024-02-16T23:33:28+00:00 |
53ee6f9ad2dfccdcce0f92e3edbaa0a1c3e8c563 | alienit/ParSQuAD | [
"region:us"
] | 2024-02-16T22:35:05+00:00 | {} | 2024-02-16T22:55:51+00:00 |
|
4da15026d0ee3c3da545e38db89ad0a6a6a080ae | vishruthnath/Calc-svamp-Tagged | [
"region:us"
] | 2024-02-16T22:37:35+00:00 | {"dataset_info": {"features": [{"name": "chain", "dtype": "string"}, {"name": "equation", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "num_unique_ops", "dtype": "int64"}, {"name": "operand", "sequence": "float64"}, {"name": "operand_tags", "sequence": "int64"}, {"name": "operation", "dtype": "string"}, {"name": "problem_type", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_split", "sequence": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "valid", "dtype": "bool"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "test", "num_bytes": 723143, "num_examples": 811}], "download_size": 156032, "dataset_size": 723143}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:20:01+00:00 |
|
8adc28cb398bc930e769f73f10c718af81671547 |
# Dataset Card for Evaluation run of fzzhang/mistralv1_gsm8k_merged_s
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [fzzhang/mistralv1_gsm8k_merged_s](https://huggingface.co/fzzhang/mistralv1_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s",
"harness_winogrande_5",
split="train")
```
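To pull the aggregated metrics instead of per-task details, the same call can target the "results" config. A minimal sketch, assuming only the config and split names listed in this card's metadata (the variable name `results` is ours, and the exact row schema is not documented in this card):

```python
from datasets import load_dataset

# The "results" config stores the aggregated run results; the "latest"
# split points at the most recent evaluation, while a timestamped split
# (2024_02_16T22_54_23.549032) pins this particular run.
results = load_dataset(
    "open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated metrics
```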
## Latest results
These are the [latest results from run 2024-02-16T22:54:23.549032](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s/blob/main/results_2024-02-16T22-54-23.549032.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" config and in the "latest" split of the corresponding eval):
```python
{
"all": {
"acc": 0.6168865618276048,
"acc_stderr": 0.03272942181052946,
"acc_norm": 0.620454228546075,
"acc_norm_stderr": 0.03338469630654732,
"mc1": 0.2729498164014688,
"mc1_stderr": 0.015594753632006526,
"mc2": 0.42426145726798214,
"mc2_stderr": 0.014425554324623623
},
"harness|arc:challenge|25": {
"acc": 0.5827645051194539,
"acc_stderr": 0.014409825518403084,
"acc_norm": 0.6203071672354948,
"acc_norm_stderr": 0.014182119866974872
},
"harness|hellaswag|10": {
"acc": 0.6467835092611034,
"acc_stderr": 0.004769924131304649,
"acc_norm": 0.8394742083250348,
"acc_norm_stderr": 0.0036634275361781595
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04292596718256981,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04292596718256981
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6381578947368421,
"acc_stderr": 0.039105257528497236,
"acc_norm": 0.6381578947368421,
"acc_norm_stderr": 0.039105257528497236
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.02881561571343211,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.02881561571343211
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7083333333333334,
"acc_stderr": 0.038009680605548594,
"acc_norm": 0.7083333333333334,
"acc_norm_stderr": 0.038009680605548594
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6473988439306358,
"acc_stderr": 0.03643037168958548,
"acc_norm": 0.6473988439306358,
"acc_norm_stderr": 0.03643037168958548
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.45098039215686275,
"acc_stderr": 0.04951218252396262,
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.04951218252396262
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165044,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165044
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4851063829787234,
"acc_stderr": 0.032671518489247764,
"acc_norm": 0.4851063829787234,
"acc_norm_stderr": 0.032671518489247764
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4298245614035088,
"acc_stderr": 0.046570472605949625,
"acc_norm": 0.4298245614035088,
"acc_norm_stderr": 0.046570472605949625
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5586206896551724,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.5586206896551724,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3783068783068783,
"acc_stderr": 0.02497695405315524,
"acc_norm": 0.3783068783068783,
"acc_norm_stderr": 0.02497695405315524
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.04390259265377562,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.04390259265377562
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7516129032258064,
"acc_stderr": 0.024580028921481003,
"acc_norm": 0.7516129032258064,
"acc_norm_stderr": 0.024580028921481003
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7393939393939394,
"acc_stderr": 0.034277431758165236,
"acc_norm": 0.7393939393939394,
"acc_norm_stderr": 0.034277431758165236
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.02937661648494562,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.02937661648494562
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8704663212435233,
"acc_stderr": 0.024233532297758733,
"acc_norm": 0.8704663212435233,
"acc_norm_stderr": 0.024233532297758733
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6230769230769231,
"acc_stderr": 0.024570975364225995,
"acc_norm": 0.6230769230769231,
"acc_norm_stderr": 0.024570975364225995
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028742040903948485,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028742040903948485
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6260504201680672,
"acc_stderr": 0.03142946637883708,
"acc_norm": 0.6260504201680672,
"acc_norm_stderr": 0.03142946637883708
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7944954128440367,
"acc_stderr": 0.01732435232501601,
"acc_norm": 0.7944954128440367,
"acc_norm_stderr": 0.01732435232501601
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.44907407407407407,
"acc_stderr": 0.03392238405321616,
"acc_norm": 0.44907407407407407,
"acc_norm_stderr": 0.03392238405321616
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7549019607843137,
"acc_stderr": 0.030190282453501954,
"acc_norm": 0.7549019607843137,
"acc_norm_stderr": 0.030190282453501954
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7805907172995781,
"acc_stderr": 0.026939106581553945,
"acc_norm": 0.7805907172995781,
"acc_norm_stderr": 0.026939106581553945
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6502242152466368,
"acc_stderr": 0.03200736719484503,
"acc_norm": 0.6502242152466368,
"acc_norm_stderr": 0.03200736719484503
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7480916030534351,
"acc_stderr": 0.03807387116306086,
"acc_norm": 0.7480916030534351,
"acc_norm_stderr": 0.03807387116306086
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7520661157024794,
"acc_stderr": 0.039418975265163025,
"acc_norm": 0.7520661157024794,
"acc_norm_stderr": 0.039418975265163025
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7037037037037037,
"acc_stderr": 0.044143436668549335,
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.044143436668549335
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7791411042944786,
"acc_stderr": 0.03259177392742178,
"acc_norm": 0.7791411042944786,
"acc_norm_stderr": 0.03259177392742178
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.375,
"acc_stderr": 0.04595091388086298,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04595091388086298
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8547008547008547,
"acc_stderr": 0.0230866350868414,
"acc_norm": 0.8547008547008547,
"acc_norm_stderr": 0.0230866350868414
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8007662835249042,
"acc_stderr": 0.014283378044296413,
"acc_norm": 0.8007662835249042,
"acc_norm_stderr": 0.014283378044296413
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7283236994219653,
"acc_stderr": 0.023948512905468358,
"acc_norm": 0.7283236994219653,
"acc_norm_stderr": 0.023948512905468358
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4346368715083799,
"acc_stderr": 0.01657899743549671,
"acc_norm": 0.4346368715083799,
"acc_norm_stderr": 0.01657899743549671
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7320261437908496,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.7320261437908496,
"acc_norm_stderr": 0.025360603796242557
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7009646302250804,
"acc_stderr": 0.02600330111788514,
"acc_norm": 0.7009646302250804,
"acc_norm_stderr": 0.02600330111788514
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7283950617283951,
"acc_stderr": 0.02474862449053737,
"acc_norm": 0.7283950617283951,
"acc_norm_stderr": 0.02474862449053737
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.02982074719142248,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.02982074719142248
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4361147327249022,
"acc_stderr": 0.01266556813545533,
"acc_norm": 0.4361147327249022,
"acc_norm_stderr": 0.01266556813545533
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6286764705882353,
"acc_stderr": 0.02934980313976587,
"acc_norm": 0.6286764705882353,
"acc_norm_stderr": 0.02934980313976587
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6552287581699346,
"acc_stderr": 0.019228322018696644,
"acc_norm": 0.6552287581699346,
"acc_norm_stderr": 0.019228322018696644
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252091,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.04494290866252091
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6857142857142857,
"acc_stderr": 0.02971932942241748,
"acc_norm": 0.6857142857142857,
"acc_norm_stderr": 0.02971932942241748
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8407960199004975,
"acc_stderr": 0.02587064676616914,
"acc_norm": 0.8407960199004975,
"acc_norm_stderr": 0.02587064676616914
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.87,
"acc_stderr": 0.03379976689896309,
"acc_norm": 0.87,
"acc_norm_stderr": 0.03379976689896309
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835816,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835816
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2729498164014688,
"mc1_stderr": 0.015594753632006526,
"mc2": 0.42426145726798214,
"mc2_stderr": 0.014425554324623623
},
"harness|winogrande|5": {
"acc": 0.77663772691397,
"acc_stderr": 0.011705697565205193
},
"harness|gsm8k|5": {
"acc": 0.47687642153146326,
"acc_stderr": 0.013757748544245317
}
}
```
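Because every MMLU ("hendrycksTest") entry above exposes the same `acc` field, a macro-average over the subtasks can be recomputed directly from this dictionary. A minimal sketch, assuming `results` mirrors the structure shown above (only two subtask entries are reproduced here for brevity; the values are taken from the block above):

```python
# Macro-average accuracy over the MMLU ("hendrycksTest") subtasks.
# `results` is assumed to mirror the dictionary printed above; only
# two entries are reproduced here, the rest are elided.
results = {
    "harness|hendrycksTest-abstract_algebra|5": {"acc": 0.25},
    "harness|hendrycksTest-anatomy|5": {"acc": 0.5555555555555556},
    # ... remaining hendrycksTest subtasks elided ...
}

mmlu_keys = [k for k in results if k.startswith("harness|hendrycksTest-")]
mmlu_acc = sum(results[k]["acc"] for k in mmlu_keys) / len(mmlu_keys)
print(f"{len(mmlu_keys)} MMLU subtasks, macro-averaged acc = {mmlu_acc:.4f}")
```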
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s | [
"region:us"
] | 2024-02-16T22:56:43+00:00 | {"pretty_name": "Evaluation run of fzzhang/mistralv1_gsm8k_merged_s", "dataset_summary": "Dataset automatically created during the evaluation run of model [fzzhang/mistralv1_gsm8k_merged_s](https://huggingface.co/fzzhang/mistralv1_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T22:54:23.549032](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__mistralv1_gsm8k_merged_s/blob/main/results_2024-02-16T22-54-23.549032.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6168865618276048,\n \"acc_stderr\": 0.03272942181052946,\n \"acc_norm\": 0.620454228546075,\n \"acc_norm_stderr\": 0.03338469630654732,\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006526,\n \"mc2\": 0.42426145726798214,\n \"mc2_stderr\": 0.014425554324623623\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5827645051194539,\n \"acc_stderr\": 0.014409825518403084,\n \"acc_norm\": 0.6203071672354948,\n \"acc_norm_stderr\": 0.014182119866974872\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6467835092611034,\n \"acc_stderr\": 0.004769924131304649,\n \"acc_norm\": 0.8394742083250348,\n \"acc_norm_stderr\": 0.0036634275361781595\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.039105257528497236,\n \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.039105257528497236\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7083333333333334,\n \"acc_stderr\": 0.038009680605548594,\n \"acc_norm\": 0.7083333333333334,\n \"acc_norm_stderr\": 0.038009680605548594\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.04951218252396262,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.04951218252396262\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.032671518489247764,\n \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.032671518489247764\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3783068783068783,\n \"acc_stderr\": 0.02497695405315524,\n \"acc_norm\": 0.3783068783068783,\n \"acc_norm_stderr\": 0.02497695405315524\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7516129032258064,\n \"acc_stderr\": 0.024580028921481003,\n \"acc_norm\": 0.7516129032258064,\n \"acc_norm_stderr\": 0.024580028921481003\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494562,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494562\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758733,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758733\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6230769230769231,\n \"acc_stderr\": 0.024570975364225995,\n \"acc_norm\": 0.6230769230769231,\n \"acc_norm_stderr\": 0.024570975364225995\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6260504201680672,\n \"acc_stderr\": 0.03142946637883708,\n \"acc_norm\": 0.6260504201680672,\n \"acc_norm_stderr\": 0.03142946637883708\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7944954128440367,\n \"acc_stderr\": 0.01732435232501601,\n \"acc_norm\": 0.7944954128440367,\n \"acc_norm_stderr\": 0.01732435232501601\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321616,\n \"acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321616\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.030190282453501954,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.030190282453501954\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6502242152466368,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.6502242152466368,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8007662835249042,\n 
\"acc_stderr\": 0.014283378044296413,\n \"acc_norm\": 0.8007662835249042,\n \"acc_norm_stderr\": 0.014283378044296413\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468358,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468358\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4346368715083799,\n \"acc_stderr\": 0.01657899743549671,\n \"acc_norm\": 0.4346368715083799,\n \"acc_norm_stderr\": 0.01657899743549671\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.02474862449053737,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.02474862449053737\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4361147327249022,\n \"acc_stderr\": 0.01266556813545533,\n \"acc_norm\": 0.4361147327249022,\n \"acc_norm_stderr\": 0.01266556813545533\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6286764705882353,\n \"acc_stderr\": 0.02934980313976587,\n \"acc_norm\": 0.6286764705882353,\n \"acc_norm_stderr\": 0.02934980313976587\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6552287581699346,\n \"acc_stderr\": 0.019228322018696644,\n \"acc_norm\": 0.6552287581699346,\n \"acc_norm_stderr\": 0.019228322018696644\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6857142857142857,\n \"acc_stderr\": 0.02971932942241748,\n \"acc_norm\": 0.6857142857142857,\n \"acc_norm_stderr\": 0.02971932942241748\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006526,\n \"mc2\": 0.42426145726798214,\n \"mc2_stderr\": 0.014425554324623623\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.77663772691397,\n \"acc_stderr\": 0.011705697565205193\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.47687642153146326,\n \"acc_stderr\": 0.013757748544245317\n }\n}\n```", "repo_url": 
"https://huggingface.co/fzzhang/mistralv1_gsm8k_merged_s", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|arc:challenge|25_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|gsm8k|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hellaswag|10_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-54-23.549032.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-54-23.549032.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-54-23.549032.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T22-54-23.549032.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-54-23.549032.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T22-54-23.549032.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["**/details_harness|winogrande|5_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T22-54-23.549032.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T22_54_23.549032", "path": ["results_2024-02-16T22-54-23.549032.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T22-54-23.549032.parquet"]}]}]} | 2024-02-16T22:57:09+00:00 |
9fdb090818045487d870dd594049a087ec643fd5 | yleo/aqua-binarized-1 | [
"region:us"
] | 2024-02-16T23:00:09+00:00 | {"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 21307, "num_examples": 10}], "download_size": 30225, "dataset_size": 21307}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T23:00:12+00:00